diff -Nru scons-4.4.0+dfsg/.appveyor/disable_msvc_10.ps1 scons-4.5.2+dfsg/.appveyor/disable_msvc_10.ps1 --- scons-4.4.0+dfsg/.appveyor/disable_msvc_10.ps1 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.appveyor/disable_msvc_10.ps1 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -New-Item -Name exclude_list.txt -ItemType File; -$workaround_image = "Visual Studio 2015"; -if ($env:APPVEYOR_BUILD_WORKER_IMAGE -eq $workaround_image) { - Add-Content -Path 'exclude_list.txt' -Value 'test\MSVS\vs-10.0-exec.py'; -} diff -Nru scons-4.4.0+dfsg/.appveyor/exclude_tests.ps1 scons-4.5.2+dfsg/.appveyor/exclude_tests.ps1 --- scons-4.4.0+dfsg/.appveyor/exclude_tests.ps1 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.appveyor/exclude_tests.ps1 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,8 @@ +New-Item -Name exclude_list.txt -ItemType File; + +# exclude VS 10.0 because it hangs the testing until this is resolved: +# https://help.appveyor.com/discussions/problems/19283-visual-studio-2010-trial-license-has-expired +$workaround_image = "Visual Studio 2015"; +if ($env:APPVEYOR_BUILD_WORKER_IMAGE -eq $workaround_image) { + Add-Content -Path 'exclude_list.txt' -Value 'test\MSVS\vs-10.0-exec.py'; +} diff -Nru scons-4.4.0+dfsg/.appveyor/install.bat scons-4.5.2+dfsg/.appveyor/install.bat --- scons-4.4.0+dfsg/.appveyor/install.bat 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.appveyor/install.bat 2023-03-21 16:17:04.000000000 +0000 @@ -1,12 +1,13 @@ C:\\%WINPYTHON%\\python.exe --version for /F "tokens=*" %%g in ('C:\\%WINPYTHON%\\python.exe -c "import sys; print(sys.path[-1])"') do (set PYSITEDIR=%%g) REM use mingw 32 bit until #3291 is resolved +REM add python and python user-base to path for pip installs set PATH=C:\\%WINPYTHON%;C:\\%WINPYTHON%\\Scripts;C:\\ProgramData\\chocolatey\\bin;C:\\MinGW\\bin;C:\\MinGW\\msys\\1.0\\bin;C:\\cygwin\\bin;C:\\msys64\\usr\\bin;C:\\msys64\\mingw64\\bin;%PATH% C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off pip setuptools wheel -C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off coverage codecov -set STATIC_DEPS=true & C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off lxml -C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off -r requirements.txt -REM install 3rd party tools to test with -choco install --allow-empty-checksums dmd ldc swig vswhere xsltproc winflexbison -set SCONS_CACHE_MSVC_CONFIG=true + +REM requirements-dev.txt will skip installing lxml for windows and py 3.11+, where there's +REM no current binary wheel +C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off -r requirements-dev.txt + +choco install --allow-empty-checksums dmd ldc swig vswhere xsltproc winflexbison3 set diff -Nru scons-4.4.0+dfsg/.appveyor/install-cov.bat scons-4.5.2+dfsg/.appveyor/install-cov.bat --- scons-4.4.0+dfsg/.appveyor/install-cov.bat 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.appveyor/install-cov.bat 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +for /F "tokens=*" %%g in ('C:\\%WINPYTHON%\\python.exe -c "import sys; print(sys.path[-1])"') do (set PYSITEDIR=%%g) +C:\\%WINPYTHON%\\python.exe -m pip install -U --progress-bar off coverage codecov diff -Nru scons-4.4.0+dfsg/.appveyor.yml scons-4.5.2+dfsg/.appveyor.yml --- scons-4.4.0+dfsg/.appveyor.yml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.appveyor.yml 2023-03-21 16:17:04.000000000 +0000 @@ -17,72 +17,76 @@ - C:\ProgramData\chocolatey\lib -> appveyor.yml install: - # add python and python user-base 
to path for pip installs + # direct choco install supposed to work, but not? still doing in install.bat + #- cinst: dmd ldc swig vswhere ixsltproc winflexbison3 - cmd: .\.appveyor\install.bat + - cmd: if %COVERAGE% equ 1 .\.appveyor\install-cov.bat -# build matrix will be number of images multiplied by each '-' below, -# less any exclusions. -# split builds into sets of four jobs due to appveyor per-job time limit +# Build matrix will be number of images multiplied by #entries in matrix:, +# less any excludes. +# +# "Build" is kind of a misnomer - we are actually running the test suite, +# and this is slow on Windows, so keep the matrix as small as possible. +# Leaving the Coverage build on VS2017 for build-time reasons (1hr time limit). +# maybe move coverage to github in future to restore some flexibility? environment: + COVERAGE: 0 + SCONS_CACHE_MSVC_CONFIG: "true" matrix: + # Test oldest and newest supported Pythons, and a subset in between. + # Skipping 3.7 and 3.9 at this time + - WINPYTHON: "Python311" - WINPYTHON: "Python310" - COVERAGE: 0 - - - WINPYTHON: "Python37" - COVERAGE: 0 - WINPYTHON: "Python38" - COVERAGE: 0 - + - WINPYTHON: "Python36" COVERAGE: 1 - - - - # remove sets of build jobs based on criteria below # to fine tune the number and platforms tested matrix: exclude: - # test python 3.8 on Visual Studio 2017 image + # test python 3.6 on Visual Studio 2017 image - image: Visual Studio 2017 - WINPYTHON: "Python310" + WINPYTHON: "Python311" - image: Visual Studio 2017 - WINPYTHON: "Python37" + WINPYTHON: "Python310" - image: Visual Studio 2017 WINPYTHON: "Python38" - # test python 3.7 on Visual Studio 2019 image + # test python 3.8 on Visual Studio 2019 image + - image: Visual Studio 2019 + WINPYTHON: "Python311" - image: Visual Studio 2019 WINPYTHON: "Python310" - image: Visual Studio 2019 WINPYTHON: "Python36" - # test python 3.10 on Visual Studio 2022 image + # test python 3.10 and 3.11 on Visual Studio 2022 image - image: Visual Studio 2022 WINPYTHON: "Python36" - image: Visual Studio 2022 - WINPYTHON: "Python37" - - image: Visual Studio 2022 WINPYTHON: "Python38" -# remove some binaries we don't want to be found +# Remove some binaries we don't want to be found +# Note this is no longer needed, git-windows bin/ is quite minimal now. before_build: - ps: .\.appveyor\ignore_git_bins.ps1 build: off build_script: - - # exclude VS 10.0 because it hangs the testing until this is resolved: - # https://help.appveyor.com/discussions/problems/19283-visual-studio-2010-trial-license-has-expired - - ps: .\.appveyor\disable_msvc_10.ps1 + # Image version-based excludes: + # No excludes at the moment, but the exclude script generates the + # (possibly empty) exclude_list.txt which is used in the following step, + # so leave the scheme in place in case we need to put back excludes later. 
+ - ps: .\.appveyor\exclude_tests.ps1 # setup coverage by creating the coverage config file, and adding coverage # to the sitecustomize so that all python processes start with coverage - - ps: .\.appveyor\coverage_setup.ps1 + - ps: if ($env:COVERAGE -eq 1) { .\.appveyor\coverage_setup.ps1 } # NOTE: running powershell from cmd is intended because # it formats the output correctly @@ -90,8 +94,7 @@ # run coverage even if there was a test failure on_finish: - - ps: .\.appveyor\coverage_report.ps1 - # running codecov in powershell causes an error so running in platform - # shells + - ps: if ($env:COVERAGE -eq 1) { .\.appveyor\coverage_report.ps1 } + # running codecov in powershell causes an error so running in cmd - cmd: if %COVERAGE% equ 1 codecov -X gcov --file coverage_xml.xml diff -Nru scons-4.4.0+dfsg/bin/scons_dev_master.py scons-4.5.2+dfsg/bin/scons_dev_master.py --- scons-4.4.0+dfsg/bin/scons_dev_master.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/bin/scons_dev_master.py 2023-03-21 16:17:04.000000000 +0000 @@ -42,7 +42,6 @@ # additional packages that Bill Deegan's web page suggests #'docbook-to-man', - #'docbook-xsl', #'docbook2x', #'tetex-bin', #'tetex-latex', @@ -86,9 +85,13 @@ 'openjdk-8-jdk', 'swig', 'texlive-base-bin', + 'texlive-font-utils', 'texlive-extra-utils', 'texlive-latex-base', 'texlive-latex-extra', + 'texlive-bibtex-extra', + 'docbook-xsl', + 'biber', 'zip', ] diff -Nru scons-4.4.0+dfsg/bin/update_doc_files.sh scons-4.5.2+dfsg/bin/update_doc_files.sh --- scons-4.4.0+dfsg/bin/update_doc_files.sh 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/bin/update_doc_files.sh 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,5 @@ +#!/bin/bash + +python bin/docs-update-generated.py +python bin/docs-validate.py +python bin/docs-create-example-outputs.py diff -Nru scons-4.4.0+dfsg/bin/upload-release-files.sh scons-4.5.2+dfsg/bin/upload-release-files.sh --- scons-4.4.0+dfsg/bin/upload-release-files.sh 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/bin/upload-release-files.sh 2023-03-21 16:17:04.000000000 +0000 @@ -27,7 +27,7 @@ README.rst \ $SF_USER@$SF_MACHINE:$SF_TOPDIR/scons/ - + # Upload main scons release files: $RSYNC $RSYNCOPTS \ SCons-$VERSION.tar.gz \ @@ -39,6 +39,7 @@ $RSYNC $RSYNCOPTS \ scons-local-$VERSION.tar.gz \ scons-local-$VERSION.zip \ + scons-local-$VERSION.pyz \ CHANGES.txt RELEASE.txt \ $SF_USER@$SF_MACHINE:$SF_TOPDIR/scons-local/$VERSION/ diff -Nru scons-4.4.0+dfsg/CHANGES.txt scons-4.5.2+dfsg/CHANGES.txt --- scons-4.4.0+dfsg/CHANGES.txt 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/CHANGES.txt 2023-03-21 16:17:04.000000000 +0000 @@ -7,6 +7,179 @@ NOTE: The 4.0.0 Release of SCons dropped Python 2.7 Support NOTE: 4.3.0 now requires Python 3.6.0 and above. Python 3.5.x is no longer supported +RELEASE 4.5.2 - Sun, 21 Mar 2023 14:08:29 -0700 + + From Michał Górny: + - Remove the redundant `wheel` dependency from `pyproject.toml`, + as it is added automatically by the setuptools PEP517 backend. + + From Mats Wichmann + - Fix a problem (#4321) in 4.5.0/4.5.1 where ParseConfig could cause an + exception in MergeFlags when the result would be to add preprocessor + defines to existing CPPDEFINES. 
The following code illustrates the + circumstances that could trigger this: + env=Environment(CPPDEFINES=['a']) + env.Append(CPPDEFINES=['b']) + env.MergeFlags({'CPPDEFINES': 'c'}) + + +RELEASE 4.5.1 - Mon, 06 Mar 2023 14:08:29 -0700 + + From Mats Wichmann + - Fix a problem in 4.5.0 where using something like the following code + will cause a Clone()'d environment to share the CPPDEFINES with the + original Environment() which was cloned. Causing leakage of changes + to CPPDEFINES when they should be completely independent after the Clone. + env=Environment(CPPDEFINES=['a']) + env.Append(CPPDEFINES=['b']) (or AppendUnique,Prepend,PrependUnique) + env1=env.Clone() + env1.Append(CPPDEFINES=['c']) (or any other modification, but not overwriting CPPDEFINES + Now env['CPPDEFINES'] will contain 'c' when it should not. + + +RELEASE 4.5.0 - Sun, 05 Mar 2023 14:08:29 -0700 + + From Anatoli Babenia: + - Do not initialize DefaultEnvironment when calling EnsureSConsVersion(), + EnsurePythonVersion(), Exit(), GetLaunchDir() and SConscriptChdir(). + - Remove unused private method SConsEnvironment._exceeds_version(). + + From William Deegan: + - Added ValidateOptions() which will check that all command line options are in either + those specified by SCons itself, or by AddOption() in SConstruct/SConscript. It should + not be called until all AddOption() calls are completed. Resolves Issue #4187 + - Refactored SCons/Taskmaster into a package. Moved SCons/Jobs.py into that package. + NOTE: If you hook into SCons.Jobs, you'll have to change that to use SCons.Taskmaster.Jobs + - Changed the Taskmaster trace logic to use python's logging module. The output formatting + should remain (mostly) the same. Minor update to unittest for this to adjust for 1 less newline. + - Migrated logging logic for --taskmastertrace to use Python's logging module. Added logging + to NewParallel Job class (Andrew Morrow's new parallel job implementation) + - Ninja: Fix execution environment sanitation for launching ninja. Previously if you set an + execution environment variable set to a python list it would crash. Now it + will create a string joining the list with os.pathsep + - Move execution environment sanitation from Action._subproc() to + SCons.Util.sanitize_shell_env(ENV) + - Moved rpm and debian directories under packaging + - Added logic to help packagers enable reproducible builds into packaging/etc/. Please + read packaging/etc/README.txt if you are interested. + - Added --experimental=tm_v2, which enables Andrew Morrow's new NewParallel Job implementation. + This should scale much better for highly parallel builds. You can also enable this via SetOption(). + - Fixed command line argument --diskcheck: previously a value of 'none' was ignored. + SetOption('diskcheck','none') is unaffected, as it did not have the problem. + - Added overrides argument to SCons.Subst.scons_subst(), subst_list(), subst(), and Action's process(), + strfunction(). This allows passing a dictionary of envvars to override when evaluating subst()'d strings/lists + - Fixed Issue #4275 - when outputting compilation db and TEMPFILE was in use, the compilation db would have + command lines using the generated tempfile for long command lines, instead of the full command line for + the compilation step for the source/target pair. + - Renamed the qt tools to qt3 since the logic in that tool is only for QT version 3. Renamed all env vars + which affect qt3 from QT_ to QT3_. 
If you are still using SCons to build QT 3 code, you'll need to update + your SConscripts. Note that using 'qt' tool has been deprecated for some time. + + From David H: + - Added JAVAPROCESSORPATH construction variable which populates -processorpath. + - Updated JavaScanner to scan JAVAPROCESSORPATH. + + From Nickolai Korshunov + - Added FILE_ENCODING, to allow explicitly setting the text encoding for files + written by the Textfile() and Substfile() builders. If not specified, Textfile() and Substfile() builders + will write files as UTF-8. Fixed Issue #4302. + + From Dan Mezhiborsky: + - Add newline to end of compilation db (compile_commands.json). + + From Daniel Moody: + - Added error message to handle the case when SCons attempts to retrieve all the targets + for a specified builder from the CacheDir, fails to do so, and then runs into an error + when deleting the files which were retrieved. Previously if this happened there was no + errors or warnings. + - Fix issue #2757, where Configure checks that perform a check which reads a modified source + (including program, source or header file(s)) would incorrectly mark that file "up to date" so the + actual build would not see the file as modified. Leading to incorrect incremental builds. + Now configure checks now clear node info for non conftest nodes, so they will be re-evaluated for + the real taskmaster run when the build commences. + + From Andrew Morrow + - Avoid returning UniqueList for `children` and other `Executor` APIs. This type + iterates more slowly than the builtin types. Also simplify uniquer_hashables to + use an faster implementation under the assumption of ordered dictionaries. + + From Ryan Saunders: + - Fixed runtest.py failure on Windows caused by excessive escaping of the path to python.exe. + + From Lukas Schrangl: + - Run LaTeX after biber/bibtex only if necessary + + From Flaviu Tamas: + - Added -fsanitize support to ParseFlags(). This will propagate to CCFLAGS and LINKFLAGS. + + From Mats Wichmann: + - A list argument as the source to the Copy() action function is now + correctly handled by converting elements to string. Copy errors out + if asked to copy a list to an existing non-directory destination. + Both the implementation and the strfunction which prints the progress + message were adjusted. Fixes #3009. + - doc: EnsureSConsVersion, EnsurePythonVersion, Exit, GetLaunchDir and + SConscriptChdir are now listed as Global functions only; the + Environment versions still work but are not documented. + - The Java scanner processing of JAVACLASSPATH for dependencies was + changed to split on os.pathsep instead of space, to match usage of + passing a path string like "xxx:yyy:zzz". This is not portable - + passing a POSIX-style path string (with ':') won't work on Windows + (';') - which is now documented with a hint to use a list instead + to be portable. Splitting on space broke paths with embedded spaces. + Fixes #4243. + - Cleanup: make sure BoolVariable usage in tests and examples uses Python + boolean values instead of 0/1. + - Stop telling people to run "python setup.py install" in the User Guide. + Adds new content on using virtualenvs to be able to have multiple + different SCons versions available on one system. + - Added the "DefaultEnvironment(tools=[])" stanza to a number of tests + that are known to be particularly slow. It's still just a tiny + speedup, but the Windows CI had been occasionally timing out, + so maybe this helps a bit. 
+ - Remove an extra existence check in one ninja test that caused it + to be skipped on some otherwise-valid Windows installations. + - test framework tests now pass on Linux and Windows (the latter can + still run into problems on some configurations), and automated + tests are now run on changes in this area so future problems can + be spotted. + - The single-file Util module was split into a package with a few + functional areas getting their own files - Util.py had grown to + over 2100 lines. + - Add a zipapp package of scons-local: can use SCons from a local + file which does not need unpacking. + - Additional explanations for MSVSProject and MSVSSolution builders. + - Fix a problem (present in 4.4.0 only) where a Java inner class could + not be cached because the emitted filename contained a '$' and when + looked up through a node ended up generating a Python SyntaxError + because it was passed through scons_subst(). + - Have the gfortran tool do a better job of honoring user preferences + for the dialect tools (F77, F90, F03 and F09, as well as the shared-library + equivalents SHF77, SHF90, SHF03, SHF09). Previously these were + unconditionally overwritten to 'gfortran'; the change should be more + in line with expectations of how these variables should work. + Also cleaned a few Fortran tests - test behavior does not change. + - Updated MSVC documentation - adds "version added" annotations on recently + added construction variables and provides a version-mapping table. + - Add Python 3.12 support, and indicate 3.11/3.12 support in package. + 3.12 is in alpha for this SCons release, the bytecode sequences + embedded in SCons/ActionTests.py may need to change later, but + based on what is known now, 3.12 itself should work with this release. + - Add "append" keyword argument to Configure context's CheckLib and + CheckLibWithHeader to control whether to append or prepend (issue #2767) + Also added "unique" keyword, to control whether a library is added + or not if it is already in the $LIBS construction var in the + configure context. (issue #2768). + - Completely refactored the CPPDEFINES logic in Append/AppendUnique/Prepend/PrependUnique + This change fixes the following GH Issues: + - GH Issue #3876 - Append() and AppendUnique() will handle CPPDEFINES the same + - GH Issue #4254 - Make handling tuples in CPPDEFINES consistent. + - We no longer sort the keys added to CPPDEFINES by their dictionary keys. + We take advantage that their order is now stable based on insertion order + in Python 3.5+ + - Added/modifed unit and system tests to verify these changes. + + RELEASE 4.4.0 - Sat, 30 Jul 2022 14:08:29 -0700 From Joseph Brill: @@ -167,16 +340,16 @@ - Ninja:Added user configurable setting of ninja depfile format via NINJA_DEPFILE_PARSE_FORMAT. Now setting NINJA_DEPFILE_PARSE_FORMAT to [msvc,gcc,clang] can force the ninja expected format. Compiler tools will also configure the variable automatically. - - Ninja: Made ninja tool force the ninja file as the only target. + - Ninja: Made ninja tool force the ninja file as the only target. - Ninja: Improved the default targets setup and made sure there is always a default target for the ninja file, which excludes targets that start and stop the daemon. - Ninja: Update ninja tool so targets passed to SCons are propagated to ninja when scons automatically executes ninja. 
- - Small refactor of scons daemons using a shared StateInfo class for communication + - Small refactor of scons daemons using a shared StateInfo class for communication between the scons interactive thread and the http server thread. Added error handling for scons interactive failing to startup. - Ninja: Updated ninja scons daemon scripts to output errors to stderr as well as the daemon log. - - Ninja: Fix typo in ninja scons daemon startup which causes ConnectionRefusedError to not retry + - Ninja: Fix typo in ninja scons daemon startup which causes ConnectionRefusedError to not retry - Added SHELL_ENV_GENERATORS construction variable. This variable should be set to a list (or an iterable) which contains functions to be called in order when constructing the execution environment (Generally this is the shell environment @@ -418,7 +591,7 @@ - Fix Issue #3906 - `IMPLICIT_COMMAND_DEPENDENCIES` was not properly disabled when set to any string value (For example ['none','false','no','off']) Also previously 'All' wouldn't have the desired affect. - + From Ivan Kravets: - Provide a custom argument escape function for `TempFileMunge` using a new `TEMPFILEARGESCFUNC` variable. Useful if you need to apply extra operations on @@ -952,7 +1125,6 @@ - JSON encoding errors for CacheDir config - JSON decoding errors for CacheDir config - RELEASE 3.1.0 - Mon, 20 Jul 2019 16:59:23 -0700 From Joseph Brill: diff -Nru scons-4.4.0+dfsg/CONTRIBUTING.rst scons-4.5.2+dfsg/CONTRIBUTING.rst --- scons-4.4.0+dfsg/CONTRIBUTING.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/CONTRIBUTING.rst 2023-03-21 16:17:04.000000000 +0000 @@ -2,7 +2,7 @@ ##################### Introduction -=========== +============ Thanks for taking the time to contribute to SCons! @@ -40,7 +40,7 @@ The SCons project welcomes bug reports and feature requests, but we *do* have a preference for having talked about them first - we request you send an email to the -`SCons Users Mailing List `_ +`SCons Users Mailing List `_ or hop on the Discord channel (see link above), and if so instructed, then proceed to an issue report. @@ -106,7 +106,7 @@ light extension of Docbook-XML, and the doc build consists of translating to pure docbook, then using standard tools to generate HTML and PDF outputs from that. There's lots more -on the documentation process at the Documentation Toolchain page:: +on the documentation process at the Documentation Toolchain page: https://github.com/SCons/scons/blob/master/doc/overview.rst @@ -132,7 +132,7 @@ There is another approach that kind of reverses that order: construct a Python virtualenv and install the development tree in it. -If you're not familiar with virtualenvs, there's an example here:: +If you're not familiar with virtualenvs, there's an example here: https://scons-cookbook.readthedocs.io/en/latest/#setting-up-a-python-virtualenv-for-scons @@ -172,7 +172,22 @@ project, such as compilers, documentation production tools, etc. should of course be installed by the appropriate means. In order to develop SCons and run its test suite, there are some dependencies, -listed in the ``requirements.txt`` file. +listed in the ``requirements-dev.txt`` file. Install these with:: + + $ python -m pip install -r requirements-dev.txt + +For building the SCons packages and documentation there are some further +requirements, you can get these with:: + + $ python -m pip install -r requirements-pkg.txt + +The requirements are inclusive so you only need the latter to get +everything installed. 
+ +There are other, non-Python requirements to do a doc build. These +are system-specific. See bin/scons_dev_master.py for the set up that +works for Ubuntu systems. + Making Changes ============== @@ -180,15 +195,15 @@ Virtually all of the SCons functionality exists in the "build engine," the ``SCons`` subdirectory hierarchy that contains all of the modules that make up SCons. The ``scripts/scons.py`` wrapper script exists mainly to find -the appropriate build engine library and then execute it. +the appropriate build engine module and execute it. In order to make your own changes locally and test them by hand, simply edit modules in the local ``SCons`` subdirectory tree and then run -(see the section above about `Executing SCons Without Installing`_):: +(see the section `Executing SCons Without Installing`_):: $ python scripts/scons.py [arguments] -(or, if using the virtualenv/editable approach, ``scons [arguents]``) +Or, if using the virtualenv/editable approach: ``scons [arguments]`` Note that the regular SCons development process makes heavy use of automated testing. See the `Testing`_ and `Typical Development Workflow`_ sections below for more @@ -320,7 +335,8 @@ Typical Development Workflow ============================ - Caveat: The point of this section isn't to describe one dogmatic workflow. +.. hint:: + The point of this section is not to describe one dogmatic workflow. Just running the test suite can be time-consuming, and getting a patch to pass all of the tests can be more so. If you're genuinely blocked, it may make more sense to submit a patch with a note about which tests still @@ -357,7 +373,9 @@ $ python runtest.py -a -o test.log - Be patient, there are more than 1100 test scripts in the whole suite! + Be patient, there are more than 1100 test scripts in the whole suite + (using a ``-j`` option pretty much always helps. For example, if you have + an 8-core processor, try ``runtest.py -j 8```). If any test scripts fail, they will be listed in a summary at the end of the log file. Some test scripts may also report NO RESULT because (for example) @@ -392,11 +410,21 @@ looking for regressions by just running the ``test/Java/\*.py`` tests instead of running all tests with ``runtest.py -a``. +- To actually submit the fix and any test work as a Pull Request, + there will be some version control steps. For example:: + + $ git checkout -b fix-1387 + $ git modified # check that this reports your expected list + $ git add `git modified` + $ git commit -s # fill in a good description of your changes + + And proceed to push the change as a PR. + Building Packages ================= -We use SCons (version 3.1.2 or later) to build its own packages. If you +We use SCons (version 3.1.2 or newer) to build its own packages. 
If you already have an appropriate version of SCons installed on your system, you can build everything by simply running it:: @@ -411,12 +439,12 @@ Those are full builds: depending on the utilities installed on your system, any or all of the following packages will be built:: - SCons-4.3.0-py3-none-any.whl - SCons-4.3.0ayyyymmdd.tar.gz - SCons-4.3.0ayyyymmdd.zip - scons-doc-4.3.0ayyyymmdd.tar.gz - scons-local-4.3.0ayyyymmdd.tar.gz - scons-local-4.3.0ayyyymmdd.zip + SCons-4.4.0-py3-none-any.whl + SCons-4.4.0ayyyymmdd.tar.gz + SCons-4.4.0ayyyymmdd.zip + scons-doc-4.4.0ayyyymmdd.tar.gz + scons-local-4.4.0ayyyymmdd.tar.gz + scons-local-4.4.0ayyyymmdd.zip The ``SConstruct`` file is supposed to be smart enough to avoid trying to build packages for which you don't have the proper utilities installed. @@ -465,14 +493,14 @@ Obsolete packaging logic - ignore this. debian/ - Files needed to construct a Debian package. The contents of this directory - are dictated by the + Files needed to construct a Debian package. + The contents of this directory are dictated by the `Debian Policy Manual `). The package will not be accepted into the Debian distribution unless the contents of this directory satisfy the relevant Debian policies. - At this point, this is a sample; SCons is packaged in the Debian - project (and thus inherited by projects which derive from it, if - they haven't made their own packages). See: + At this point, this is a sample; SCons is packaged for Debian by the + Debian project itself (and thus inherited by projects which derive from it, + if they haven't made their own packages). See: - `Debian scons packages `_ - `Ubuntu scons packages `_ @@ -528,8 +556,8 @@ documentation stubs kept together with pieces of the engine. test/ - End-to-end tests of the SCons utility itself. These are separate from the - individual module unit tests. + End-to-end tests of the SCons utility itself. + These are separate from the individual module unit tests. testing/ SCons testing framework. diff -Nru scons-4.4.0+dfsg/debian/changelog scons-4.5.2+dfsg/debian/changelog --- scons-4.4.0+dfsg/debian/changelog 2022-09-20 15:35:41.000000000 +0000 +++ scons-4.5.2+dfsg/debian/changelog 2023-10-08 22:58:33.000000000 +0000 @@ -1,3 +1,16 @@ +scons (4.5.2+dfsg-1) unstable; urgency=medium + + [ Debian Janitor ] + * debian/copyright: use spaces rather than tabs to start continuation lines. + * Bump debhelper from old 12 to 13. + * Set upstream metadata fields: Bug-Database, Bug-Submit, Repository-Browse. + + [ Laszlo Boszormenyi (GCS) ] + * New major upstream release. + * Update Standards-Version to 4.6.2 . 
+ + -- Laszlo Boszormenyi (GCS) Mon, 09 Oct 2023 00:58:33 +0200 + scons (4.4.0+dfsg-1) unstable; urgency=medium * New upstream release: diff -Nru scons-4.4.0+dfsg/debian/control scons-4.5.2+dfsg/debian/control --- scons-4.4.0+dfsg/debian/control 2022-09-20 15:35:41.000000000 +0000 +++ scons-4.5.2+dfsg/debian/control 2023-10-08 22:58:33.000000000 +0000 @@ -3,7 +3,7 @@ Priority: optional Maintainer: Laszlo Boszormenyi (GCS) Build-Depends: - debhelper-compat (= 12), + debhelper-compat (= 13), dh-python, python3-all (>= 3.5), fop, @@ -14,7 +14,7 @@ gdc , python3-psutil , ldc [amd64] -Standards-Version: 4.6.1 +Standards-Version: 4.6.2 Rules-Requires-Root: no Homepage: https://www.scons.org/ Vcs-Git: https://salsa.debian.org/debian/scons.git diff -Nru scons-4.4.0+dfsg/debian/copyright scons-4.5.2+dfsg/debian/copyright --- scons-4.4.0+dfsg/debian/copyright 2020-08-23 18:25:23.000000000 +0000 +++ scons-4.5.2+dfsg/debian/copyright 2023-10-08 22:58:33.000000000 +0000 @@ -23,14 +23,14 @@ 2003-2008 Mark Brown 2008-2014 Luca Falavigna 2014-2020 Jörg Frings-Fürst - 2020 Ben Hutchings - 2018-2020 Laszlo Boszormenyi (GCS) + 2020 Ben Hutchings + 2018-2023 Laszlo Boszormenyi (GCS) License: GPL-3.0+ Files: debian/patches/* Copyright: 2010-2014 Luca Falavigna 2014-2019 Jörg Frings-Fürst - 2020 Ben Hutchings + 2020 Ben Hutchings License: Expat License: Expat diff -Nru scons-4.4.0+dfsg/debian/upstream/metadata scons-4.5.2+dfsg/debian/upstream/metadata --- scons-4.4.0+dfsg/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/debian/upstream/metadata 2023-10-08 22:58:33.000000000 +0000 @@ -0,0 +1,4 @@ +--- +Bug-Database: https://github.com/SCons/scons/issues +Bug-Submit: https://github.com/SCons/scons/issues/new +Repository-Browse: https://github.com/SCons/scons diff -Nru scons-4.4.0+dfsg/doc/design/chtml.xsl scons-4.5.2+dfsg/doc/design/chtml.xsl --- scons-4.4.0+dfsg/doc/design/chtml.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/design/chtml.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,57 +1,57 @@ - - - - - - - - - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - + + + + + + + + + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + diff -Nru scons-4.4.0+dfsg/doc/design/html.xsl scons-4.5.2+dfsg/doc/design/html.xsl --- scons-4.4.0+dfsg/doc/design/html.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/design/html.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,151 +1,151 @@ - - - - - - - - - - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - - - - - - - - - - - - - - - - - - - <xsl:copy-of select="$title"/> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title 
+part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + + + + + + + + + + + + + + + + + + + <xsl:copy-of select="$title"/> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/doc/design/pdf.xsl scons-4.5.2+dfsg/doc/design/pdf.xsl --- scons-4.4.0+dfsg/doc/design/pdf.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/design/pdf.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,73 +1,73 @@ - - - - - - - - - - - - -0pt - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - bold - - - - - - - - - - - + + + + + + + + + + + + +0pt + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + bold + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/doc/generated/builders.gen scons-4.5.2+dfsg/doc/generated/builders.gen --- scons-4.4.0+dfsg/doc/generated/builders.gen 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/builders.gen 2023-03-21 16:17:04.000000000 +0000 @@ -624,9 +624,9 @@ -env.Java(target = 'classes', source = 'src') -env.Java(target = 'classes', source = ['src1', 'src2']) -env.Java(target = 'classes', source = ['File1.java', 'File2.java']) +env.Java(target='classes', source='src') +env.Java(target='classes', source=['src1', 'src2']) +env.Java(target='classes', source=['File1.java', 'File2.java']) @@ -768,7 +768,7 @@ Builds an output file from a moc input file. moc input files are either header files or C++ files. This builder is only available after using the -tool &t-link-qt;. See the &cv-link-QTDIR; variable for more information. +tool &t-link-qt3;. See the &cv-link-QT3DIR; variable for more information. Example: @@ -832,70 +832,106 @@ MSVSProject() env.MSVSProject() - Builds a Microsoft Visual Studio project file, and by default - builds a solution file as well. + Build a Microsoft Visual C++ project file and solution file. - This builds a Visual Studio project file, based on the - version of Visual Studio that is configured (either the - latest installed version, or the version specified by - &cv-link-MSVS_VERSION; in the Environment constructor). For - Visual Studio 6, it will generate a .dsp - file. For Visual Studio 7, 8, and 9, it will - generate a .vcproj file. For Visual - Studio 10 and later, it will generate a - .vcxproj file. - + Builds a C++ project file based on the + version of Visual Studio (or to be more precise, of MSBuild) + that is configured: either the latest installed version, + or the version specified by + &cv-link-MSVC_VERSION; in the current &consenv;. + For Visual Studio 6.0 a .dsp file is generated. + For Visual Studio versions 2002-2008, + a .vcproj file is generated. + For Visual Studio 2010 and later a .vcxproj + file is generated. + Note there are multiple versioning schemes involved in + the Microsoft compilation environment - + see the description of &cv-link-MSVC_VERSION; for equivalences. 
+ &SCons; does not know how to construct project files for + other languages (such as .csproj for C#, + .vbproj for Visual Basic or + .pyproject for Python)). + - By default, this also generates a solution file for the - specified project, a .dsw file for - Visual Studio 6 or a .sln file for - Visual Studio 7 and later. This behavior may be disabled by - specifying auto_build_solution=0 when you - call &b-MSVSProject;, in which case you presumably want to - build the solution file(s) by calling the &b-MSVSSolution; - Builder (see below). + For the .vcxproj file, the underlying + format is the MSBuild XML Schema, and the details conform to: + + https://learn.microsoft.com/en-us/cpp/build/reference/vcxproj-file-structure. + The generated solution file enables Visual Studio to + understand the project structure, and allows building it + using MSBuild to call back to &SCons;. + The project file encodes a toolset version that has been + selected by &SCons; as described above. Since recent Visual + Studio versions support multiple concurrent toolsets, + use &cv-link-MSVC_VERSION; to select the desired one if + it does not match the &SCons; default. + The project file also includes entries which describe + how to call &SCons; to build the project from within Visual Studio + (or from an MSBuild command line). + In some situations &SCons; may generate this incorrectly - + notably when using the scons-local + distribution, which is not installed in a way that that + matches the default invocation line. + If so, the &cv-link-SCONS_HOME; &consvar; can be used to describe + the right way to locate the &SCons; code so that it can be imported. - The &b-MSVSProject; builder takes several lists of filenames - to be placed into the project file. These are currently - limited to srcs, incs, - localincs, resources, and - misc. These are pretty self-explanatory, - but it should be noted that these lists are added to the - &cv-link-SOURCES; construction variable as strings, NOT as - SCons File Nodes. This is because they represent file names - to be added to the project file, not the source files used - to build the project file. + By default, a matching solution file for the project is also generated. + This behavior may be disabled by + specifying auto_build_solution=0 + to the &b-MSVSProject; builder. + The solution file can also be independently + generated by calling the &b-MSVSSolution; builder, + such as in the case where a solution should describe + multiple projects. + See the &b-link-MSVSSolution; description for further information. - The above filename lists are all optional, although at least - one must be specified for the resulting project file to + The &b-MSVSProject; builder accepts several keyword arguments + describing lists of filenames to be placed into the project file. + Currently, + srcs, + incs, + localincs, + resources, + and misc + are recognized. + The names are intended to be self-explanatory, but note that the + filenames need to be specified as strings, not + as &SCons; File Nodes + (for example if you generate files for inclusion by using the + &f-link-Glob; function, the results should be converted to + a list of strings before passing them to &b-MSVSProject;). + This is because Visual Studio and MSBuild know nothing about &SCons; + Node types. + Each of the filename lists are individually optional, but at + least one list must be specified for the resulting project file to be non-empty. 
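 As a minimal sketch of the Glob-to-string point above (the project name, file layout and variant here are invented, and the builder is only usable where the msvs tool loads, i.e. with Microsoft Visual C++ available), the conversion might look like:

   # Hypothetical illustration: MSVSProject expects plain filename strings,
   # so convert the Nodes returned by Glob() before passing them in.
   env = Environment()
   srcs = [str(f) for f in Glob('src/*.cpp')]
   incs = [str(f) for f in Glob('include/*.h')]
   prog = env.Program('example', srcs)
   env.MSVSProject(
       target='Example' + env['MSVSPROJECTSUFFIX'],
       srcs=srcs,
       incs=incs,
       buildtarget=prog[0],
       variant='Release',
   )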
In addition to the above lists of values, the following values - may be specified: + may be specified as keyword arguments: - target + target The name of the target .dsp or .vcproj file. The correct suffix for the version of Visual Studio must be used, but the &cv-link-MSVSPROJECTSUFFIX; - construction variable will be defined to the correct + &consvar; will be defined to the correct value (see example below). - variant + variant - The name of this particular variant. For Visual Studio 7 + The name of this particular variant. Except for Visual Studio 6 projects, this can also be a list of variant names. These are typically things like "Debug" or "Release", but really can be anything you want. For Visual Studio @@ -910,145 +946,184 @@ - cmdargs + cmdargs Additional command line arguments for the different variants. The number of - cmdargs entries must match the number - of variant entries, or be empty (not + cmdargs entries must match the number + of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. - cppdefines + cppdefines Preprocessor definitions for the different variants. - The number of cppdefines entries - must match the number of variant + The number of cppdefines entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all - variants. If you don't give this parameter, SCons + variants. If you don't give this parameter, &SCons; will use the invoking environment's - CPPDEFINES entry for all variants. + &cv-link-CPPDEFINES; entry for all variants. - cppflags + cppflags Compiler flags for the different variants. - If a /std:c++ flag is found then /Zc:__cplusplus is - appended to the flags if not already found, this - ensures that intellisense uses the /std:c++ switch. - The number of cppflags entries - must match the number of variant + If a flag is found then + is appended to the + flags if not already found, this ensures that Intellisense + uses the switch. + The number of cppflags entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. If you don't give this parameter, SCons will combine the invoking environment's - CCFLAGS, CXXFLAGS, - CPPFLAGS entries for all variants. + &cv-link-CCFLAGS;, &cv-link-CXXFLAGS;, + &cv-link-CPPFLAGS; entries for all variants. - cpppaths + cpppaths Compiler include paths for the different variants. - The number of cpppaths entries - must match the number of variant + The number of cpppaths entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. If you don't give this parameter, SCons will use the invoking environment's - CPPPATH entry for all variants. + &cv-link-CPPPATH; entry for all variants. - buildtarget + buildtarget An optional string, node, or list of strings or nodes (one per build variant), to tell the Visual Studio debugger what output target to use in what build variant. The number of - buildtarget entries must match the - number of variant entries. + buildtarget entries must match the + number of variant entries. - runfile + runfile The name of the file that Visual Studio 7 and later will run and debug. This appears as the - value of the Output field in the - resulting Visual Studio project file. If this is not + value of the Output field in the + resulting Visual C++ project file. 
If this is not specified, the default is the same as the specified - buildtarget value. + buildtarget value. + + + &SCons; and Microsoft Visual Studio understand projects in + different ways, and the mapping is sometimes imperfect: + - Note that because &SCons; always executes its build commands + Because &SCons; always executes its build commands from the directory in which the &SConstruct; file is located, if you generate a project file in a different directory - than the &SConstruct; directory, users will not be able to + than the directory of the &SConstruct; file, users will not be able to double-click on the file name in compilation error messages displayed in the Visual Studio console output window. This can - be remedied by adding the Visual C/C++ /FC + be remedied by adding the Visual C/C++ compiler option to the &cv-link-CCFLAGS; variable so that the compiler will print the full path name of any files that cause compilation errors. + + If the project file is only used to teach the Visual Studio + project browser about the file layout there should be no issues, + However, Visual Studio should not be used to make changes + to the project structure, build options, etc. as these will + (a) not feed back to the &SCons; description of the project + and (b) be lost if &SCons; regenerates the project file. + The SConscript files should remain the definitive description + of the build. + + + If the project file is used to drive MSBuild (such as selecting + "build" from the Visual Studio interface) you lose the direct + control of target selection and command-line options you would + have if launching the build directly from &SCons;, + because these will be hardcoded in the project file to the + values specified in the &b-MSVSProject; call. + You can regain some of this control by defining multiple variants, + using multiple &b-MSVSProject; calls to arrange different build + targets, arguments, defines, flags and paths for different variants. + + + If the build is divided into a solution with multiple MSBuild + projects the mapping is further strained. In this case, + it is important not to set Visual Studio to do parallel builds, + as it will then launch the separate project builds in parallel, + and &SCons; does not work well if called that way. + Instead you can set up the &SCons; build for parallel building - + see the &f-link-SetOption; function for how to do this with + num_jobs. 
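 A minimal sketch of that approach (the job count of 8 is only an example value) is to have the SConscript request the parallelism itself rather than relying on Visual Studio:

   # Ask SCons to run up to 8 jobs in parallel, instead of letting
   # Visual Studio/MSBuild try to parallelize across projects.
   SetOption('num_jobs', 8)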
+ + + Example usage: barsrcs = ['bar.cpp'] barincs = ['bar.h'] barlocalincs = ['StdAfx.h'] -barresources = ['bar.rc','resource.h'] +barresources = ['bar.rc', 'resource.h'] barmisc = ['bar_readme.txt'] -dll = env.SharedLibrary(target='bar.dll', - source=barsrcs) +dll = env.SharedLibrary(target='bar.dll', source=barsrcs) buildtarget = [s for s in dll if str(s).endswith('dll')] -env.MSVSProject(target='Bar' + env['MSVSPROJECTSUFFIX'], - srcs=barsrcs, - incs=barincs, - localincs=barlocalincs, - resources=barresources, - misc=barmisc, - buildtarget=buildtarget, - variant='Release') +env.MSVSProject( + target='Bar' + env['MSVSPROJECTSUFFIX'], + srcs=barsrcs, + incs=barincs, + localincs=barlocalincs, + resources=barresources, + misc=barmisc, + buildtarget=buildtarget, + variant='Release', +) - - Starting with version 2.4 of SCons it is - also possible to specify the optional argument - DebugSettings, which creates files - for debugging under Visual Studio: - + - DebugSettings + DebugSettings A dictionary of debug settings that get written to the .vcproj.user or the .vcxproj.user file, depending on the - version installed. As it is done for cmdargs (see above), + version installed. As for cmdargs, you can specify a DebugSettings dictionary per variant. If you give only one, it will be propagated to all variants. + + Changed in version 2.4: + Added the optional DebugSettings parameter. + @@ -1072,12 +1147,17 @@ # Check command args to force one Microsoft Visual Studio version if msvcver == '9' or msvcver == '11': - env = Environment(MSVC_VERSION=msvcver+'.0', MSVC_BATCH=False) + env = Environment(MSVC_VERSION=msvcver + '.0', MSVC_BATCH=False) else: - env = Environment() + env = Environment() -AddOption('--userfile', action='store_true', dest='userfile', default=False, - help="Create Visual Studio Project user file") +AddOption( + '--userfile', + action='store_true', + dest='userfile', + default=False, + help="Create Visual C++ project file", +) # # 1. 
Configure your Debug Setting dictionary with options you want in the list @@ -1085,28 +1165,28 @@ # a specific application for testing your dll with Microsoft Visual Studio 2008 (v9): # V9DebugSettings = { - 'Command':'c:\\myapp\\using\\thisdll.exe', + 'Command': 'c:\\myapp\\using\\thisdll.exe', 'WorkingDirectory': 'c:\\myapp\\using\\', 'CommandArguments': '-p password', -# 'Attach':'false', -# 'DebuggerType':'3', -# 'Remote':'1', -# 'RemoteMachine': None, -# 'RemoteCommand': None, -# 'HttpUrl': None, -# 'PDBPath': None, -# 'SQLDebugging': None, -# 'Environment': '', -# 'EnvironmentMerge':'true', -# 'DebuggerFlavor': None, -# 'MPIRunCommand': None, -# 'MPIRunArguments': None, -# 'MPIRunWorkingDirectory': None, -# 'ApplicationCommand': None, -# 'ApplicationArguments': None, -# 'ShimCommand': None, -# 'MPIAcceptMode': None, -# 'MPIAcceptFilter': None, + # 'Attach':'false', + # 'DebuggerType':'3', + # 'Remote':'1', + # 'RemoteMachine': None, + # 'RemoteCommand': None, + # 'HttpUrl': None, + # 'PDBPath': None, + # 'SQLDebugging': None, + # 'Environment': '', + # 'EnvironmentMerge':'true', + # 'DebuggerFlavor': None, + # 'MPIRunCommand': None, + # 'MPIRunArguments': None, + # 'MPIRunWorkingDirectory': None, + # 'ApplicationCommand': None, + # 'ApplicationArguments': None, + # 'ShimCommand': None, + # 'MPIAcceptMode': None, + # 'MPIAcceptFilter': None, } # @@ -1120,28 +1200,28 @@ 'LocalDebuggerCommand': 'c:\\myapp\\using\\thisdll.exe', 'LocalDebuggerWorkingDirectory': 'c:\\myapp\\using\\', 'LocalDebuggerCommandArguments': '-p password', -# 'LocalDebuggerEnvironment': None, -# 'DebuggerFlavor': 'WindowsLocalDebugger', -# 'LocalDebuggerAttach': None, -# 'LocalDebuggerDebuggerType': None, -# 'LocalDebuggerMergeEnvironment': None, -# 'LocalDebuggerSQLDebugging': None, -# 'RemoteDebuggerCommand': None, -# 'RemoteDebuggerCommandArguments': None, -# 'RemoteDebuggerWorkingDirectory': None, -# 'RemoteDebuggerServerName': None, -# 'RemoteDebuggerConnection': None, -# 'RemoteDebuggerDebuggerType': None, -# 'RemoteDebuggerAttach': None, -# 'RemoteDebuggerSQLDebugging': None, -# 'DeploymentDirectory': None, -# 'AdditionalFiles': None, -# 'RemoteDebuggerDeployDebugCppRuntime': None, -# 'WebBrowserDebuggerHttpUrl': None, -# 'WebBrowserDebuggerDebuggerType': None, -# 'WebServiceDebuggerHttpUrl': None, -# 'WebServiceDebuggerDebuggerType': None, -# 'WebServiceDebuggerSQLDebugging': None, + # 'LocalDebuggerEnvironment': None, + # 'DebuggerFlavor': 'WindowsLocalDebugger', + # 'LocalDebuggerAttach': None, + # 'LocalDebuggerDebuggerType': None, + # 'LocalDebuggerMergeEnvironment': None, + # 'LocalDebuggerSQLDebugging': None, + # 'RemoteDebuggerCommand': None, + # 'RemoteDebuggerCommandArguments': None, + # 'RemoteDebuggerWorkingDirectory': None, + # 'RemoteDebuggerServerName': None, + # 'RemoteDebuggerConnection': None, + # 'RemoteDebuggerDebuggerType': None, + # 'RemoteDebuggerAttach': None, + # 'RemoteDebuggerSQLDebugging': None, + # 'DeploymentDirectory': None, + # 'AdditionalFiles': None, + # 'RemoteDebuggerDeployDebugCppRuntime': None, + # 'WebBrowserDebuggerHttpUrl': None, + # 'WebBrowserDebuggerDebuggerType': None, + # 'WebServiceDebuggerHttpUrl': None, + # 'WebServiceDebuggerDebuggerType': None, + # 'WebServiceDebuggerSQLDebugging': None, } # @@ -1163,72 +1243,86 @@ barsrcs = ['bar.cpp', 'dllmain.cpp', 'stdafx.cpp'] barincs = ['targetver.h'] barlocalincs = ['StdAfx.h'] -barresources = ['bar.rc','resource.h'] +barresources = ['bar.rc', 'resource.h'] barmisc = ['ReadMe.txt'] -dll = 
env.SharedLibrary(target='bar.dll', - source=barsrcs) +dll = env.SharedLibrary(target='bar.dll', source=barsrcs) -env.MSVSProject(target='Bar' + env['MSVSPROJECTSUFFIX'], - srcs=barsrcs, - incs=barincs, - localincs=barlocalincs, - resources=barresources, - misc=barmisc, - buildtarget=[dll[0]] * 2, - variant=('Debug|Win32', 'Release|Win32'), - cmdargs='vc=%s' % msvcver, - DebugSettings=(dbgSettings, {})) +env.MSVSProject( + target='Bar' + env['MSVSPROJECTSUFFIX'], + srcs=barsrcs, + incs=barincs, + localincs=barlocalincs, + resources=barresources, + misc=barmisc, + buildtarget=[dll[0]] * 2, + variant=('Debug|Win32', 'Release|Win32'), + cmdargs=f'vc={msvcver}', + DebugSettings=(dbgSettings, {}), +) MSVSSolution() env.MSVSSolution() - Builds a Microsoft Visual Studio solution file. + Build a Microsoft Visual Studio Solution file. - This builds a Visual Studio solution file, based on the - version of Visual Studio that is configured (either the + Builds a Visual Studio solution file based on the + version of Visual Studio that is configured: either the latest installed version, or the version specified by - &cv-link-MSVS_VERSION; in the construction environment). For - Visual Studio 6, it will generate a .dsw - file. For Visual Studio 7 (.NET), it will generate a - .sln file. + &cv-link-MSVC_VERSION; in the &consenv;. For + Visual Studio 6, a .dsw file is generated. + For Visual Studio .NET 2002 and later, + it will generate a .sln file. + Note there are multiple versioning schemes involved in + the Microsoft compilation environment - + see the description of &cv-link-MSVC_VERSION; for equivalences. + + + The solution file is a container for one or more projects, + and follows the format described at + + https://learn.microsoft.com/en-us/visualstudio/extensibility/internals/solution-dot-sln-file. The following values must be specified: - target + target - The name of the target .dsw or .sln file. The correct + The name of the target .dsw or + .sln file. The correct suffix for the version of Visual Studio must be used, but the value &cv-link-MSVSSOLUTIONSUFFIX; will be defined to the correct value (see example below). - - variant + + + variant + The name of this particular variant, or a list of variant names (the latter is only supported for MSVS 7 solutions). These are typically things like "Debug" or "Release", but really can be anything you want. For MSVS 7 they may also specify target platform, like this - "Debug|Xbox". Default platform is Win32. + "Debug|Xbox". Default platform is Win32. - - projects + + + projects + A list of project file names, or Project nodes returned - by calls to the &b-MSVSProject; Builder, to be placed - into the solution file. It should be noted that these - file names are NOT added to the $SOURCES environment - variable in form of files, but rather as strings. - This is because they represent file names to be added - to the solution file, not the source files used to - build the solution file. + by calls to the &b-link-MSVSProject; Builder, to be placed + into the solution file. + Note that these filenames need to be specified as strings, + NOT as &SCons; File Nodes. + This is because the solution file will be interpreted by MSBuild + and by Visual Studio, which know nothing about &SCons; Node types. @@ -1446,7 +1540,7 @@ This builder is only provided when Microsoft Visual C++ is being used as the compiler. 
The &b-PCH; builder is generally used in -conjunction with the &cv-link-PCH; construction variable to force object files to use +conjunction with the &cv-link-PCH; &consvar; to force object files to use the precompiled header: @@ -1988,15 +2082,17 @@ -classes = env.Java(target = 'classdir', source = 'src') -env.RMIC(target = 'outdir1', source = classes) - -env.RMIC(target = 'outdir2', - source = ['package/foo.class', 'package/bar.class']) - -env.RMIC(target = 'outdir3', - source = ['classes/foo.class', 'classes/bar.class'], - JAVACLASSDIR = 'classes') +classes = env.Java(target='classdir', source='src') +env.RMIC(target='outdir1', source=classes) +env.RMIC( + target='outdir2', + source=['package/foo.class', 'package/bar.class'], +) +env.RMIC( + target='outdir3', + source=['classes/foo.class', 'classes/bar.class'], + JAVACLASSDIR='classes', +) @@ -2353,6 +2449,11 @@ +By default the target file encoding is "utf-8" and can be changed by &cv-link-FILE_ENCODING; +Examples: + + + If a single source file name is specified and has a .in suffix, the suffix is stripped and the remainder of the name is used as the default target name. @@ -2484,7 +2585,7 @@ are flattened. Source strings need not literally be Python strings: they can be Nodes or Python objects that convert cleanly -to &f-link-Value; nodes +to &f-link-Value; nodes. @@ -2492,6 +2593,9 @@ and &cv-link-TEXTFILESUFFIX; &consvars; (by default an empty string and .txt, respectively) are automatically added to the target if they are not already present. + + +By default the target file encoding is "utf-8" and can be changed by &cv-link-FILE_ENCODING; Examples: @@ -2704,14 +2808,14 @@ Builds a header file, an implementation file and a moc file from an ui file. and returns the corresponding nodes in the that order. -This builder is only available after using the tool &t-link-qt;. +This builder is only available after using the tool &t-link-qt3;. Note: you can specify .ui files directly as source files to the &b-link-Program;, &b-link-Library; and &b-link-SharedLibrary; builders without using this builder. Using this builder lets you override the standard naming conventions (be careful: prefixes are always prepended to names of built files; if you don't want prefixes, you may set them to ``). -See the &cv-link-QTDIR; variable for more information. +See the &cv-link-QT3DIR; variable for more information. 
Example: diff -Nru scons-4.4.0+dfsg/doc/generated/examples/caching_ex-random_1.xml scons-4.5.2+dfsg/doc/generated/examples/caching_ex-random_1.xml --- scons-4.4.0+dfsg/doc/generated/examples/caching_ex-random_1.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/examples/caching_ex-random_1.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,8 +1,8 @@ % scons -Q +cc -o f1.o -c f1.c cc -o f4.o -c f4.c cc -o f2.o -c f2.c -cc -o f3.o -c f3.c -cc -o f1.o -c f1.c cc -o f5.o -c f5.c +cc -o f3.o -c f3.c cc -o prog f1.o f2.o f3.o f4.o f5.o diff -Nru scons-4.4.0+dfsg/doc/generated/examples/factories_Chmod_1.xml scons-4.5.2+dfsg/doc/generated/examples/factories_Chmod_1.xml --- scons-4.4.0+dfsg/doc/generated/examples/factories_Chmod_1.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/examples/factories_Chmod_1.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ % scons -Q Copy("file.out", "file.in") -Chmod("file.out", 0755) +Chmod("file.out", 0o755) diff -Nru scons-4.4.0+dfsg/doc/generated/examples/separate_ex1_2.xml scons-4.5.2+dfsg/doc/generated/examples/separate_ex1_2.xml --- scons-4.4.0+dfsg/doc/generated/examples/separate_ex1_2.xml 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/examples/separate_ex1_2.xml 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,16 @@ +% scons -Q --tree=prune +cc -o build/hello.o -c build/hello.c +cc -o build/hello build/hello.o ++-. + +-SConstruct + +-build + | +-build/SConscript + | +-build/hello + | | +-build/hello.o + | | +-build/hello.c + | +-build/hello.c + | +-[build/hello.o] + +-src + +-src/SConscript + +-src/hello.c + diff -Nru scons-4.4.0+dfsg/doc/generated/examples/troubleshoot_stacktrace_2.xml scons-4.5.2+dfsg/doc/generated/examples/troubleshoot_stacktrace_2.xml --- scons-4.4.0+dfsg/doc/generated/examples/troubleshoot_stacktrace_2.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/examples/troubleshoot_stacktrace_2.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,11 +1,11 @@ % scons -Q --debug=stacktrace scons: *** [prog.o] Source `prog.c' not found, needed by target `prog.o'. scons: internal stack trace: - File "SCons/Job.py", line 203, in start + File "SCons/Taskmaster/Job.py", line 219, in start task.prepare() File "SCons/Script/Main.py", line 180, in prepare return SCons.Taskmaster.OutOfDateTask.prepare(self) - File "SCons/Taskmaster.py", line 186, in prepare + File "SCons/Taskmaster/__init__.py", line 195, in prepare executor.prepare() File "SCons/Executor.py", line 418, in prepare raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0])) diff -Nru scons-4.4.0+dfsg/doc/generated/functions.gen scons-4.5.2+dfsg/doc/generated/functions.gen --- scons-4.4.0+dfsg/doc/generated/functions.gen 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/functions.gen 2023-03-21 16:17:04.000000000 +0000 @@ -425,126 +425,205 @@ env.Append(key=val, [...]) -Intelligently append values to &consvars; in the &consenv; -named by env. +Appends value(s) intelligently to &consvars; in +env. The &consvars; and values to add to them are passed as key=val pairs (&Python; keyword arguments). &f-env-Append; is designed to allow adding values -without normally having to know the data type of an existing &consvar;. +without having to think about the data type of an existing &consvar;. 
Regular &Python; syntax can also be used to manipulate the &consvar;, -but for that you must know the type of the &consvar;: -for example, different &Python; syntax is needed to combine -a list of values with a single string value, or vice versa. +but for that you may need to know the types involved, +for example pure &Python; lets you directly "add" two lists of strings, +but adding a string to a list or a list to a string requires +different syntax - things &f-Append; takes care of. Some pre-defined &consvars; do have type expectations -based on how &SCons; will use them, -for example &cv-link-CPPDEFINES; is normally a string or a list of strings, -but can be a string, -a list of strings, -a list of tuples, -or a dictionary, while &cv-link-LIBEMITTER; -would expect a callable or list of callables, -and &cv-link-BUILDERS; would expect a mapping type. +based on how &SCons; will use them: +for example &cv-link-CPPDEFINES; is often a string or a list of strings, +but can also be a list of tuples or a dictionary; +while &cv-link-LIBEMITTER; +is expected to be a callable or list of callables, +and &cv-link-BUILDERS; is expected to be a dictionary. Consult the documentation for the various &consvars; for more details. -The following descriptions apply to both the append -and prepend functions, the only difference being -the insertion point of the added values. - - -If env. does not have a &consvar; -indicated by key, -val -is added to the environment under that key as-is. +The following descriptions apply to both the &f-Append; +and &f-Prepend; methods, as well as their +Unique variants, +with the differences being the insertion point of the added values +and whether duplication is allowed. -val can be almost any type, -and &SCons; will combine it with an existing value into an appropriate type, -but there are a few special cases to be aware of. -When two strings are combined, -the result is normally a new string, -with the caller responsible for supplying any needed separation. -The exception to this is the &consvar; &cv-link-CPPDEFINES;, -in which each item will be postprocessed by adding a prefix -and/or suffix, -so the contents are treated as a list of strings, that is, -adding a string will result in a separate string entry, -not a combined string. For &cv-CPPDEFINES; as well as -for &cv-link-LIBS;, and the various *PATH; -variables, &SCons; will supply the compiler-specific -syntax (e.g. adding a -D or /D -prefix for &cv-CPPDEFINES;), so this syntax should be omitted when +val can be almost any type. +If env does not have a &consvar; +named key, +then key is simply +stored with a value of val. +Otherwise, val is +combined with the existing value, +possibly converting it into an appropriate type +which can hold the expanded contents. +There are a few special cases to be aware of. +Normally, when two strings are combined, +the result is a new string containing their concatenation +(and you are responsible for supplying any needed separation); +however, the contents of &cv-link-CPPDEFINES; +will be postprocessed by adding a prefix and/or suffix +to each entry when the command line is produced, +so &SCons; keeps them separate - +appending a string will result in a separate string entry, +not a combined string. +For &cv-CPPDEFINES;, as well as +&cv-link-LIBS;, and the various *PATH variables, +&SCons; will amend the variable by supplying the compiler-specific +syntax (e.g.
prepending a -D or /D +prefix for &cv-CPPDEFINES;), so you should omit this syntax when adding values to these variables. -Example (gcc syntax shown in the expansion of &CPPDEFINES;): +Examples (gcc syntax shown in the expansion of &CPPDEFINES;): env = Environment(CXXFLAGS="-std=c11", CPPDEFINES="RELEASE") -print("CXXFLAGS={}, CPPDEFINES={}".format(env['CXXFLAGS'], env['CPPDEFINES'])) -# notice including a leading space in CXXFLAGS value +print(f"CXXFLAGS = {env['CXXFLAGS']}, CPPDEFINES = {env['CPPDEFINES']}") +# notice including a leading space in CXXFLAGS addition env.Append(CXXFLAGS=" -O", CPPDEFINES="EXTRA") -print("CXXFLAGS={}, CPPDEFINES={}".format(env['CXXFLAGS'], env['CPPDEFINES'])) -print("CPPDEFINES will expand to {}".format(env.subst("$_CPPDEFFLAGS"))) +print(f"CXXFLAGS = {env['CXXFLAGS']}, CPPDEFINES = {env['CPPDEFINES']}") +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) $ scons -Q -CXXFLAGS=-std=c11, CPPDEFINES=RELEASE -CXXFLAGS=-std=c11 -O, CPPDEFINES=['RELEASE', 'EXTRA'] +CXXFLAGS = -std=c11, CPPDEFINES = RELEASE +CXXFLAGS = -std=c11 -O, CPPDEFINES = deque(['RELEASE', 'EXTRA']) CPPDEFINES will expand to -DRELEASE -DEXTRA scons: `.' is up to date. -Because &cv-link-CPPDEFINES; is intended to -describe C/C++ pre-processor macro definitions, -it accepts additional syntax. -Preprocessor macros can be valued, or un-valued, as in --DBAR=1 or --DFOO. -The macro can be be supplied as a complete string including the value, -or as a tuple (or list) of macro, value, or as a dictionary. -Example (again gcc syntax in the expanded defines): +Because &cv-link-CPPDEFINES; is intended for command-line +specification of C/C++ preprocessor macros, +additional syntax is accepted when adding to it. +The preprocessor accepts arguments to predefine a macro name by itself +(-DFOO for most compilers, +/DFOO for Microsoft C++), +which gives it an implicit value of 1, +or can be given with a replacement value +(-DBAR=TEXT). +&SCons; follows these rules when adding to &cv-CPPDEFINES;: + + + +A string is split on spaces, +giving an easy way to enter multiple macros in one addition. +Use an = to specify a valued macro. + + +A tuple is treated as a valued macro. +Use the value None if the macro should not have a value. +It is an error to supply more than two elements in such a tuple. + + +A list is processed in order, +adding each item without further interpretation. +In this case, space-separated strings are not split. + + +A dictionary is processed in order, +adding each key:value pair as a valued macro. +Use the value None if the macro should not have a value. 
+ + + + + +Examples: env = Environment(CPPDEFINES="FOO") -print("CPPDEFINES={}".format(env['CPPDEFINES'])) +print("CPPDEFINES =", env['CPPDEFINES']) env.Append(CPPDEFINES="BAR=1") -print("CPPDEFINES={}".format(env['CPPDEFINES'])) -env.Append(CPPDEFINES=("OTHER", 2)) -print("CPPDEFINES={}".format(env['CPPDEFINES'])) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES=[("OTHER", 2)]) +print("CPPDEFINES =", env['CPPDEFINES']) env.Append(CPPDEFINES={"EXTRA": "arg"}) -print("CPPDEFINES={}".format(env['CPPDEFINES'])) -print("CPPDEFINES will expand to {}".format(env.subst("$_CPPDEFFLAGS"))) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) $ scons -Q -CPPDEFINES=FOO -CPPDEFINES=['FOO', 'BAR=1'] -CPPDEFINES=['FOO', 'BAR=1', ('OTHER', 2)] -CPPDEFINES=['FOO', 'BAR=1', ('OTHER', 2), {'EXTRA': 'arg'}] +CPPDEFINES = FOO +CPPDEFINES = deque(['FOO', 'BAR=1']) +CPPDEFINES = deque(['FOO', 'BAR=1', ('OTHER', 2)]) +CPPDEFINES = deque(['FOO', 'BAR=1', ('OTHER', 2), ('EXTRA', 'arg')]) CPPDEFINES will expand to -DFOO -DBAR=1 -DOTHER=2 -DEXTRA=arg scons: `.' is up to date. -Adding a string val -to a dictonary &consvar; will enter -val as the key in the dict, +Examples of adding multiple macros: + + + +env = Environment() +env.Append(CPPDEFINES=[("ONE", 1), "TWO", ("THREE", )]) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES={"FOUR": 4, "FIVE": None}) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) + + + +$ scons -Q +CPPDEFINES = [('ONE', 1), 'TWO', ('THREE',)] +CPPDEFINES = deque([('ONE', 1), 'TWO', ('THREE',), ('FOUR', 4), ('FIVE', None)]) +CPPDEFINES will expand to -DONE=1 -DTWO -DTHREE -DFOUR=4 -DFIVE +scons: `.' is up to date. + + + +Changed in version 4.5: +clarified the use of tuples vs. other types; +handling is now consistent across the four functions. + + + +env = Environment() +env.Append(CPPDEFINES=("MACRO1", "MACRO2")) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES=[("MACRO3", "MACRO4")]) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) + + + +$ scons -Q +CPPDEFINES = ('MACRO1', 'MACRO2') +CPPDEFINES = deque(['MACRO1', 'MACRO2', ('MACRO3', 'MACRO4')]) +CPPDEFINES will expand to -DMACRO1 -DMACRO2 -DMACRO3=MACRO4 +scons: `.' is up to date. + + + +See &cv-link-CPPDEFINES; for more details. + + + +Appending a string val +to a dictionary-typed &consvar; enters +val as the key in the dictionary, and None as its value. -Using a tuple type to supply a key + value only works -for the special case of &cv-link-CPPDEFINES; +Using a tuple type to supply a key, value +only works for the special case of &cv-CPPDEFINES; described above. Although most combinations of types work without needing to know the details, some combinations -do not make sense and a &Python; exception will be raised. +do not make sense and &Python; raises an exception. @@ -552,7 +631,7 @@ which are path specifications (conventionally, the names of such end in PATH), it is recommended to add the values as a list of strings, -even if there is only a single string to add. +even if you are only adding a single string. The same goes for adding library names to &cv-LIBS;. @@ -616,18 +695,18 @@ - env.AppendUnique(key=val, [...], delete_existing=False) + env.AppendUnique(key=val, [...], [delete_existing=False]) Append values to &consvars; in the current &consenv;, maintaining uniqueness.
-Works like &f-link-env-Append; (see for details), -except that values already present in the &consvar; -will not be added again. +Works like &f-link-env-Append;, +except that values that would become duplicates +are not added. If delete_existing -is True, -the existing matching value is first removed, -and the requested value is added, -having the effect of moving such values to the end. +is set to a true value, then for any duplicate, +the existing instance of val is first removed, +then val is appended, +having the effect of moving it to the end. @@ -1546,7 +1625,6 @@ EnsurePythonVersion(major, minor) - env.EnsurePythonVersion(major, minor) Ensure that the Python version is at least major.minor. @@ -1566,7 +1644,6 @@ EnsureSConsVersion(major, minor, [revision]) - env.EnsureSConsVersion(major, minor, [revision]) Ensure that the SCons version is at least major.minor, @@ -1669,7 +1746,6 @@ Exit([value]) - env.Exit([value]) This tells &scons; @@ -2117,7 +2193,6 @@ GetLaunchDir() - env.GetLaunchDir() Returns the absolute path name of the directory from which &scons; @@ -2138,18 +2213,22 @@ GetOption(name) env.GetOption(name) -This function provides a way to query the value of -options which can be set via the command line or using the -&f-link-SetOption; function. +Query the value of settable options which may have been set +on the command line, or by using the &f-link-SetOption; function. +The value of the option is returned in a type matching how the +option was declared - see the documentation for the +corresponding command line option for information about each specific +option. + name can be an entry from the following table, which shows the corresponding command line arguments that could affect the value. name can also be the destination variable name from a project-specific option added using the -&f-link-AddOption; function, as long as the addition -happens prior to the &f-GetOption; call in the SConscript files. +&f-link-AddOption; function, as long as that addition has been +processed prior to the &f-GetOption; call in the &SConscript; files. @@ -2374,56 +2453,83 @@ - - -See the documentation for the -corresponding command line option for information about each specific -option. - - Glob(pattern, [ondisk, source, strings, exclude]) - env.Glob(pattern, [ondisk, source, strings, exclude]) + Glob(pattern, [ondisk=True, source=False, strings=False, exclude=None]) + env.Glob(pattern, [ondisk=True, source=False, strings=False, exclude=None]) -Returns Nodes (or strings) that match the specified -pattern, -relative to the directory of the current -&SConscript; -file. +Returns a possibly empty list of Nodes (or strings) that match +pathname specification pattern. +pattern can be absolute, +top-relative, +or (most commonly) relative to the directory of the current +&SConscript; file. +&f-Glob; matches both files stored on disk and Nodes +which &SCons; already knows about, even if any corresponding +file is not currently stored on disk. The environment method form (&f-env-Glob;) performs string substitution on pattern -and returns whatever matches -the resulting expanded pattern. +and returns whatever matches the resulting expanded pattern. +The results are sorted, unlike for the similar &Python; +glob.glob function, +to ensure build order will be stable. -The specified pattern -uses Unix shell style metacharacters for matching: +can contain POSIX-style shell metacharacters for matching: - - * matches everything - ?
matches any single character - [seq] matches any character in seq - [!seq] matches any char not in seq - + + + + + Pattern + Meaning + + + + + * + matches everything + + + ? + matches any single character + + + [seq] + matches any character in seq + (can be a list or a range). + + + [!seq] + matches any character not in seq + + + + -If the first character of a filename is a dot, -it must be matched explicitly. -Character matches do -not -span directory separators. +For a literal match, wrap the metacharacter in brackets to +escape the normal behavior. +For example, '[?]' matches the character +'?'. + + + +Filenames starting with a dot are specially handled - +they can only be matched by patterns that start with a dot +(or have a dot immediately following a pathname separator +character, or slash); they are not matched by the metacharacters. +Metacharacter matches also do not span directory separators. -The &f-Glob; -knows about -repositories +understands repositories (see the &f-link-Repository; function) @@ -2431,8 +2537,7 @@ (see the &f-link-VariantDir; function) -and -returns a Node (or string, if so configured) +and returns a Node (or string, if so configured) match in the local (SConscript) directory if a matching Node is found anywhere in a corresponding @@ -2440,65 +2545,60 @@ -The +If the optional ondisk -argument may be set to a value which evaluates -False -to disable the search for matches on disk, -thereby only returning matches among -already-configured File or Dir Nodes. -The default behavior is to -return corresponding Nodes -for any on-disk matches found. +argument evaluates false, +the search for matches on disk is disabled, +and only matches from +already-configured File or Dir Nodes are returned. +The default is to return Nodes for +matches on disk as well. -The +If the optional source -argument may be set to a value which evaluates -True -to specify that, -when the local directory is a -&f-VariantDir;, -the returned Nodes should be from the -corresponding source directory, -not the local directory. +argument evaluates true, +and the local directory is a variant directory, +then &f-Glob; returns Nodes from +the corresponding source directory, +rather than the local directory. + -The +If the optional strings -argument may be set to a value which evaluates -True -to have the +argument evaluates true, &f-Glob; -function return strings, not Nodes, -that represent the matched files or directories. +returns matches as strings, rather than Nodes. The returned strings will be relative to the local (SConscript) directory. -(Note that This may make it easier to perform +(Note that while this may make it easier to perform arbitrary manipulation of file names, -but if the returned strings are +it loses the context &SCons; would have in the Node, +so if the returned strings are passed to a different &SConscript; file, -any Node translation will be relative -to the other +any Node translation there will be relative +to that &SConscript; directory, -not the original +not to the original &SConscript; directory.) -The +The optional exclude argument may be set to a pattern or a list of patterns -(following the same Unix shell semantics) -which must be filtered out of returned elements. -Elements matching a least one pattern of -this list will be excluded. +describing files or directories +to filter out of the match list. +Elements matching at least one specified pattern will be excluded. +These patterns use the same syntax as for +pattern.
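To make the optional arguments above concrete, a small sketch (the file names and layout are hypothetical, not taken from the upstream examples that follow):

env = Environment()
# Default Node results, filtering out hypothetical test and generated sources:
sources = Glob('*.c', exclude=['*_test.c', 'gen_*.c'])
# String results relative to this SConscript directory, e.g. for name manipulation:
names = Glob('*.c', strings=True)
env.Program('app', sources)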
@@ -2508,9 +2608,10 @@ Program("foo", Glob("*.c")) Zip("/tmp/everything", Glob(".??*") + Glob("*")) -sources = Glob("*.cpp", exclude=["os_*_specific_*.cpp"]) + \ - Glob( "os_%s_specific_*.cpp" % currentOS) +sources = Glob("*.cpp", exclude=["os_*_specific_*.cpp"]) \ + + Glob("os_%s_specific_*.cpp" % currentOS) + @@ -2542,24 +2643,26 @@ Ignore(target, dependency) env.Ignore(target, dependency) -The specified dependency file(s) -will be ignored when deciding if -the target file(s) need to be rebuilt. - - - -You can also use -&f-Ignore; -to remove a target from the default build. -In order to do this you must specify the directory the target will -be built in as the target, and the file you want to skip building -as the dependency. +Ignores dependency +when deciding if +target needs to be rebuilt. +target and +dependency +can each be a single filename or Node +or a list of filenames or Nodes. -Note that this will only remove the dependencies listed from -the files built by default. It will still be built if that -dependency is needed by another object being built. +&f-Ignore; can also be used to +remove a target from the default build +by specifying the directory the target will be built in as +target +and the file you want to skip selecting for building as +dependency. +Note that this only removes the target from +the default target selection algorithm: +if it is a dependency of another object being +built &SCons; still builds it normally. See the third and forth examples below. @@ -2925,15 +3028,15 @@ command typical of the POSIX programming environment (for example, pkg-config). -Note that such a comamnd is executed using the +Note that such a command is executed using the SCons execution environment; if the command needs additional information, -that information needs to be explcitly provided. +that information needs to be explicitly provided. See &f-link-ParseConfig; for more details. -Flag values are translated accordig to the prefix found, +Flag values are translated according to the prefix found, and added to the following construction variables: @@ -2944,6 +3047,7 @@ -frameworkdir= FRAMEWORKPATH -fmerge-all-constants CCFLAGS, LINKFLAGS -fopenmp CCFLAGS, LINKFLAGS +-fsanitize CCFLAGS, LINKFLAGS -include CCFLAGS -imacros CCFLAGS -isysroot CCFLAGS, LINKFLAGS @@ -3115,20 +3219,20 @@ - env.PrependUnique(key=val, delete_existing=False, [...]) + env.PrependUnique(key=val, [...], [delete_existing=False]) Prepend values to &consvars; in the current &consenv;, maintaining uniqueness. -Works like &f-link-env-Append; (see for details), +Works like &f-link-env-Append;, except that values are added to the front, -rather than the end, of any existing value of the &consvar;, -and values already present in the &consvar; -will not be added again. +rather than the end, of the &consvar;, +and values that would become duplicates +are not added. If delete_existing -is True, -the existing matching value is first removed, -and the requested value is inserted, -having the effect of moving such values to the front. +is set to a true value, then for any duplicate, +the existing instance of val is first removed, +then val is inserted, +having the effect of moving it to the front. @@ -3712,24 +3816,16 @@ SConscriptChdir(value) - env.SConscriptChdir(value) By default, &scons; changes its working directory to the directory in which each -subsidiary SConscript file lives. +subsidiary SConscript file lives +while reading and processing that script. 
This behavior may be disabled -by specifying either: - - - -SConscriptChdir(0) -env.SConscriptChdir(0) - - - -in which case +by specifying an argument which +evaluates false, in which case &scons; will stay in the top-level directory while reading all SConscript files. @@ -3747,10 +3843,9 @@ -env = Environment() -SConscriptChdir(0) +SConscriptChdir(False) SConscript('foo/SConscript') # will not chdir to foo -env.SConscriptChdir(1) +SConscriptChdir(True) SConscript('bar/SConscript') # will chdir to bar @@ -4339,6 +4434,12 @@ +Changed in version 4.2: +&f-env-Tool; now returns the tool object; +previously it did not return (i.e. returned None). + + + Examples: @@ -4375,14 +4476,63 @@ gltool = Tool('opengl', toolpath = ['tools']) gltool(env) # adds 'opengl' to the TOOLS variable - - -Changed in &SCons; 4.2: &f-env-Tool; now returns -the tool object, previously it did not return -(i.e. returned None). - + + ValidateOptions([throw_exception=False]) + + Check that all the options specified on the command line are either defined by SCons itself + or defined by calls to &f-link-AddOption;. + + + This function should only be called after the last &f-link-AddOption; call in your &SConscript; + logic. + + + Be aware that some tools call &f-link-AddOption;; if you are getting error messages for arguments + that they add, you will need to ensure that you load those tools before you call &f-ValidateOptions;. + + + If there are any command line options not defined, calling this function will cause SCons to issue an + error message and then exit with an error exit + status. + If the optional throw_exception is True, &f-ValidateOptions; will raise a + SConsBadOptionError + exception. This allows the calling + &SConscript; logic to catch that exception and handle invalid options itself. + + + + Example: + + + +try: + ValidateOptions(throw_exception=True) +except SConsBadOptionError as e: + print("Parser is SConsOptionParser:%s" % (isinstance(e.parser, SConsOptionParser))) + print("Message is :%s" % e.opt_str) + Exit(3) + + + + This function is useful to force SCons to fail fast before you execute any expensive logic later in your + build. + For example, if you specify build options via command-line flags, a simple typo could yield the incorrect build + option throughout your entire build. + + +scons --compilers=mingw (the correct flag is --compiler) + + + This could cause SCons to run configure steps with the incorrect compiler, costing developer time trying to + track down why the configure logic failed with a compiler which should work. + + + New in version 4.5.0 + + + Value(value, [built_value], [name]) env.Value(value, [built_value], [name]) @@ -4398,10 +4548,6 @@ files are up-to-date.) When using timestamp source signatures, Value Nodes' timestamps are equal to the system time when the Node is created. -name can be provided as an alternative name -for the resulting Value node; this is advised -if the value parameter can't be converted to -a string. @@ -4421,6 +4567,18 @@ +The optional name parameter can be provided as an +alternative name for the resulting Value node; +this is advised if the value parameter +cannot be converted to a string. + + + +Changed in version 4.0: +the name parameter was added. + + + Examples: @@ -4440,8 +4598,8 @@ # Attach a .Config() builder for the above function action # to the construction environment.
-env['BUILDERS']['Config'] = Builder(action = create) -env.Config(target = 'package-config', source = Value(prefix)) +env['BUILDERS']['Config'] = Builder(action=create) +env.Config(target='package-config', source=Value(prefix)) def build_value(target, source, env): # A function that "builds" a Python Value by updating @@ -4454,8 +4612,8 @@ # Attach a .UpdateValue() builder for the above function # action to the construction environment. -env['BUILDERS']['UpdateValue'] = Builder(action = build_value) -env.UpdateValue(target = Value(output), source = Value(input)) +env['BUILDERS']['UpdateValue'] = Builder(action=build_value) +env.UpdateValue(target=Value(output), source=Value(input)) @@ -4529,7 +4687,7 @@ variant_dir, regardless of the value of duplicate. -When calling an SConscript file, you can use the +When calling an SConscript file, you can use the exports keyword argument to pass parameters (individually or as an appropriately set up environment) so the SConscript can pick up the right settings for that variant build. diff -Nru scons-4.4.0+dfsg/doc/generated/functions.mod scons-4.5.2+dfsg/doc/generated/functions.mod --- scons-4.4.0+dfsg/doc/generated/functions.mod 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/functions.mod 2023-03-21 16:17:04.000000000 +0000 @@ -84,6 +84,7 @@ subst"> Tag"> Tool"> +ValidateOptions"> Value"> VariantDir"> WhereIs"> @@ -164,6 +165,7 @@ env.subst"> env.Tag"> env.Tool"> +env.ValidateOptions"> env.Value"> env.VariantDir"> env.WhereIs"> @@ -250,6 +252,7 @@ subst"> Tag"> Tool"> +ValidateOptions"> Value"> VariantDir"> WhereIs"> @@ -330,6 +333,7 @@ env.subst"> env.Tag"> env.Tool"> +env.ValidateOptions"> env.Value"> env.VariantDir"> env.WhereIs"> diff -Nru scons-4.4.0+dfsg/doc/generated/tools.gen scons-4.5.2+dfsg/doc/generated/tools.gen --- scons-4.4.0+dfsg/doc/generated/tools.gen 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/tools.gen 2023-03-21 16:17:04.000000000 +0000 @@ -779,13 +779,13 @@ msvc -Sets construction variables for the Microsoft Visual C/C++ compiler. +Sets &consvars; for the Microsoft Visual C/C++ compiler. -Sets: &cv-link-BUILDERS;, &cv-link-CC;, &cv-link-CCCOM;, &cv-link-CCDEPFLAGS;, &cv-link-CCFLAGS;, &cv-link-CCPCHFLAGS;, &cv-link-CCPDBFLAGS;, &cv-link-CFILESUFFIX;, &cv-link-CFLAGS;, &cv-link-CPPDEFPREFIX;, &cv-link-CPPDEFSUFFIX;, &cv-link-CXX;, &cv-link-CXXCOM;, &cv-link-CXXFILESUFFIX;, &cv-link-CXXFLAGS;, &cv-link-INCPREFIX;, &cv-link-INCSUFFIX;, &cv-link-OBJPREFIX;, &cv-link-OBJSUFFIX;, &cv-link-PCHCOM;, &cv-link-PCHPDBFLAGS;, &cv-link-RC;, &cv-link-RCCOM;, &cv-link-RCFLAGS;, &cv-link-SHCC;, &cv-link-SHCCCOM;, &cv-link-SHCCFLAGS;, &cv-link-SHCFLAGS;, &cv-link-SHCXX;, &cv-link-SHCXXCOM;, &cv-link-SHCXXFLAGS;, &cv-link-SHOBJPREFIX;, &cv-link-SHOBJSUFFIX;.Uses: &cv-link-CCCOMSTR;, &cv-link-CXXCOMSTR;, &cv-link-MSVC_NOTFOUND_POLICY;, &cv-link-PCH;, &cv-link-PCHSTOP;, &cv-link-PDB;, &cv-link-SHCCCOMSTR;, &cv-link-SHCXXCOMSTR;. 
+Sets: &cv-link-BUILDERS;, &cv-link-CC;, &cv-link-CCCOM;, &cv-link-CCDEPFLAGS;, &cv-link-CCFLAGS;, &cv-link-CCPCHFLAGS;, &cv-link-CCPDBFLAGS;, &cv-link-CFILESUFFIX;, &cv-link-CFLAGS;, &cv-link-CPPDEFPREFIX;, &cv-link-CPPDEFSUFFIX;, &cv-link-CXX;, &cv-link-CXXCOM;, &cv-link-CXXFILESUFFIX;, &cv-link-CXXFLAGS;, &cv-link-INCPREFIX;, &cv-link-INCSUFFIX;, &cv-link-OBJPREFIX;, &cv-link-OBJSUFFIX;, &cv-link-PCHCOM;, &cv-link-PCHPDBFLAGS;, &cv-link-RC;, &cv-link-RCCOM;, &cv-link-RCFLAGS;, &cv-link-SHCC;, &cv-link-SHCCCOM;, &cv-link-SHCCFLAGS;, &cv-link-SHCFLAGS;, &cv-link-SHCXX;, &cv-link-SHCXXCOM;, &cv-link-SHCXXFLAGS;, &cv-link-SHOBJPREFIX;, &cv-link-SHOBJSUFFIX;.Uses: &cv-link-CCCOMSTR;, &cv-link-CXXCOMSTR;, &cv-link-MSVC_NOTFOUND_POLICY;, &cv-link-MSVC_SCRIPTERROR_POLICY;, &cv-link-MSVC_SCRIPT_ARGS;, &cv-link-MSVC_SDK_VERSION;, &cv-link-MSVC_SPECTRE_LIBS;, &cv-link-MSVC_TOOLSET_VERSION;, &cv-link-MSVC_USE_SCRIPT;, &cv-link-MSVC_USE_SCRIPT_ARGS;, &cv-link-MSVC_USE_SETTINGS;, &cv-link-MSVC_VERSION;, &cv-link-PCH;, &cv-link-PCHSTOP;, &cv-link-PDB;, &cv-link-SHCCCOMSTR;, &cv-link-SHCXXCOMSTR;. msvs - Sets construction variables for Microsoft Visual Studio. + Sets &consvars; for Microsoft Visual Studio. Sets: &cv-link-MSVSBUILDCOM;, &cv-link-MSVSCLEANCOM;, &cv-link-MSVSENCODING;, &cv-link-MSVSPROJECTCOM;, &cv-link-MSVSREBUILDCOM;, &cv-link-MSVSSCONS;, &cv-link-MSVSSCONSCOM;, &cv-link-MSVSSCONSCRIPT;, &cv-link-MSVSSCONSFLAGS;, &cv-link-MSVSSOLUTIONCOM;. @@ -867,13 +867,22 @@ qt +Placeholder tool to alert anyone still using qt tools to switch to qt3 or newer tool. + + + + + qt3 + Sets &consvars; for building Qt3 applications. This tool is only suitable for building targeted to Qt3, which is obsolete -(the tool is deprecated since 4.3). +(the tool is deprecated since 4.3, +and was renamed to qt3 in 4.5.0. +). There are contributed tools for Qt4 and Qt5, see https://github.com/SCons/scons-contrib. @@ -901,11 +910,11 @@ -Environment(tools=['default','qt']) +Environment(tools=['default','qt3']) -The &t-qt; tool supports the following operations: +The &t-qt3; tool supports the following operations: @@ -920,7 +929,7 @@ .hxx, .hh. You can turn off automatic moc file generation by setting -&cv-link-QT_AUTOSCAN; to False. +&cv-link-QT3_AUTOSCAN; to False. See also the corresponding &b-link-Moc; Builder. @@ -930,11 +939,11 @@ As described in the Qt documentation, include the moc file at the end of the C++ file. Note that you have to include the file, which is generated by the transformation -${QT_MOCCXXPREFIX}<basename>${QT_MOCCXXSUFFIX}, by default +${QT3_MOCCXXPREFIX}<basename>${QT3_MOCCXXSUFFIX}, by default <basename>.mo. A warning is generated after building the moc file if you do not include the correct file. If you are using &f-link-VariantDir;, you may need to specify duplicate=True. -You can turn off automatic moc file generation by setting &cv-QT_AUTOSCAN; to +You can turn off automatic moc file generation by setting &cv-QT3_AUTOSCAN; to False. See also the corresponding &b-link-Moc; Builder. @@ -952,7 +961,7 @@ See also the corresponding &b-link-Uic; Builder. 
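As a hedged sketch of the automatic handling described above (file names are hypothetical), a .ui file can simply be listed as a source and the qt3 tool will run uic and moc as needed:

env = Environment(tools=['default', 'qt3'])
# 'mainwindow.ui' is handled automatically; no explicit Uic/Moc calls are required.
env.Program('myapp', ['main.cpp', 'mainwindow.ui'])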
-Sets: &cv-link-QTDIR;, &cv-link-QT_AUTOSCAN;, &cv-link-QT_BINPATH;, &cv-link-QT_CPPPATH;, &cv-link-QT_LIB;, &cv-link-QT_LIBPATH;, &cv-link-QT_MOC;, &cv-link-QT_MOCCXXPREFIX;, &cv-link-QT_MOCCXXSUFFIX;, &cv-link-QT_MOCFROMCXXCOM;, &cv-link-QT_MOCFROMCXXFLAGS;, &cv-link-QT_MOCFROMHCOM;, &cv-link-QT_MOCFROMHFLAGS;, &cv-link-QT_MOCHPREFIX;, &cv-link-QT_MOCHSUFFIX;, &cv-link-QT_UIC;, &cv-link-QT_UICCOM;, &cv-link-QT_UICDECLFLAGS;, &cv-link-QT_UICDECLPREFIX;, &cv-link-QT_UICDECLSUFFIX;, &cv-link-QT_UICIMPLFLAGS;, &cv-link-QT_UICIMPLPREFIX;, &cv-link-QT_UICIMPLSUFFIX;, &cv-link-QT_UISUFFIX;.Uses: &cv-link-QTDIR;. +Sets: &cv-link-QT3DIR;, &cv-link-QT3_AUTOSCAN;, &cv-link-QT3_BINPATH;, &cv-link-QT3_CPPPATH;, &cv-link-QT3_LIB;, &cv-link-QT3_LIBPATH;, &cv-link-QT3_MOC;, &cv-link-QT3_MOCCXXPREFIX;, &cv-link-QT3_MOCCXXSUFFIX;, &cv-link-QT3_MOCFROMCXXCOM;, &cv-link-QT3_MOCFROMCXXFLAGS;, &cv-link-QT3_MOCFROMHCOM;, &cv-link-QT3_MOCFROMHFLAGS;, &cv-link-QT3_MOCHPREFIX;, &cv-link-QT3_MOCHSUFFIX;, &cv-link-QT3_UIC;, &cv-link-QT3_UICCOM;, &cv-link-QT3_UICDECLFLAGS;, &cv-link-QT3_UICDECLPREFIX;, &cv-link-QT3_UICDECLSUFFIX;, &cv-link-QT3_UICIMPLFLAGS;, &cv-link-QT3_UICIMPLPREFIX;, &cv-link-QT3_UICIMPLSUFFIX;, &cv-link-QT3_UISUFFIX;.Uses: &cv-link-QT3DIR;. rmic @@ -1071,7 +1080,7 @@ Set &consvars; for the &b-Textfile; and &b-Substfile; builders. -Sets: &cv-link-LINESEPARATOR;, &cv-link-SUBSTFILEPREFIX;, &cv-link-SUBSTFILESUFFIX;, &cv-link-TEXTFILEPREFIX;, &cv-link-TEXTFILESUFFIX;.Uses: &cv-link-SUBST_DICT;. +Sets: &cv-link-FILE_ENCODING;, &cv-link-LINESEPARATOR;, &cv-link-SUBSTFILEPREFIX;, &cv-link-SUBSTFILESUFFIX;, &cv-link-TEXTFILEPREFIX;, &cv-link-TEXTFILESUFFIX;.Uses: &cv-link-SUBST_DICT;. tlib diff -Nru scons-4.4.0+dfsg/doc/generated/tools.mod scons-4.5.2+dfsg/doc/generated/tools.mod --- scons-4.4.0+dfsg/doc/generated/tools.mod 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/tools.mod 2023-03-21 16:17:04.000000000 +0000 @@ -86,6 +86,7 @@ pdftex"> python"> qt"> +qt3"> rmic"> rpcgen"> sgiar"> @@ -192,6 +193,7 @@ pdftex"> python"> qt"> +qt3"> rmic"> rpcgen"> sgiar"> diff -Nru scons-4.4.0+dfsg/doc/generated/variables.gen scons-4.5.2+dfsg/doc/generated/variables.gen --- scons-4.4.0+dfsg/doc/generated/variables.gen 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/variables.gen 2023-03-21 16:17:04.000000000 +0000 @@ -448,7 +448,7 @@ to support building with precompiled headers. The default value expands expands to the appropriate Microsoft Visual C++ command-line options -when the &cv-link-PCH; construction variable is set. +when the &cv-link-PCH; &consvar; is set. @@ -462,7 +462,7 @@ Microsoft Visual C++ PDB file. The default value expands expands to appropriate Microsoft Visual C++ command-line options -when the &cv-link-PDB; construction variable is set. +when the &cv-link-PDB; &consvar; is set. @@ -709,69 +709,112 @@ A platform independent specification of C preprocessor macro definitions. -The definitions will be added to command lines +The definitions are added to command lines through the automatically-generated -&cv-link-_CPPDEFFLAGS; &consvar; (see above), +&cv-link-_CPPDEFFLAGS; &consvar;, which is constructed according to -the type of value of &cv-CPPDEFINES;: +the contents of &cv-CPPDEFINES;: + + If &cv-CPPDEFINES; is a string, the values of the &cv-link-CPPDEFPREFIX; and &cv-link-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to -each definition in &cv-link-CPPDEFINES;. 
+are respectively prepended and appended to +each definition in &cv-CPPDEFINES;, +split on whitespace. -# Will add -Dxyz to POSIX compiler command lines, +# Adds -Dxyz to POSIX compiler command lines, # and /Dxyz to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES='xyz') + + If &cv-CPPDEFINES; is a list, the values of the &cv-CPPDEFPREFIX; and &cv-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to +are respectively prepended and appended to each element in the list. -If any element is a list or tuple, -then the first item is the name being -defined and the second item is its value: +If any element is a tuple (or list) +then the first item of the tuple is the macro name +and the second is the macro definition. +If the definition is not omitted and is not None, +the name and definition are combined into a single +name=definition item +before the prepending/appending. -# Will add -DB=2 -DA to POSIX compiler command lines, +# Adds -DB=2 -DA to POSIX compiler command lines, # and /DB=2 /DA to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES=[('B', 2), 'A']) + + If &cv-CPPDEFINES; is a dictionary, the values of the &cv-CPPDEFPREFIX; and &cv-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to -each item from the dictionary. -The key of each dictionary item -is a name being defined -to the dictionary item's corresponding value; -if the value is -None, -then the name is defined without an explicit value. -Note that the resulting flags are sorted by keyword -to ensure that the order of the options on the -command line is consistent each time -&scons; -is run. +are respectively prepended and appended to +each key from the dictionary. +If the value for a key is not None, +then the key (macro name) and the value +(macro definition) are combined into a single +name=definition item +before the prepending/appending. -# Will add -DA -DB=2 to POSIX compiler command lines, -# and /DA /DB=2 to Microsoft Visual C++ command lines. +# Adds -DA -DB=2 to POSIX compiler command lines, +# or /DA /DB=2 to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES={'B':2, 'A':None}) + + + +Depending on how contents are added to &cv-CPPDEFINES;, +it may be transformed into a compound type, +for example a list containing strings, tuples and/or dictionaries. +&SCons; can correctly expand such a compound type. + + + +Note that &SCons; may call the compiler via a shell. +If a macro definition contains characters such as spaces that +have meaning to the shell, or is intended to be a string value, +you may need to use the shell's quoting syntax to avoid +interpretation by the shell before the preprocessor sees it. +Function-like macros are not supported via this mechanism +(and some compilers do not even implement that functionality +via the command line). +When quoting, note that +one set of quote characters is used to define a &Python; string, +and quotes embedded inside it will be consumed by the shell +unless escaped. These examples may help illustrate: + + + +env = Environment(CPPDEFINES=['USE_ALT_HEADER=\\"foo_alt.h\\"']) +env = Environment(CPPDEFINES=[('USE_ALT_HEADER', '\\"foo_alt.h\\"')]) + + + +Changed in version 4.5: +&SCons; no longer sorts &cv-CPPDEFINES; values entered +in dictionary form. &Python; now preserves dictionary +keys in the order they are entered, so it is no longer +necessary to sort them to ensure a stable command line.
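To illustrate the compound-type behavior described above, a short sketch (macro names are hypothetical; gcc-style expansion assumed) mixing the addition styles:

env = Environment(CPPDEFINES='RELEASE')
env.Append(CPPDEFINES=[('VERSION', 3)])
env.Append(CPPDEFINES={'VERBOSE': None})
# CPPDEFINES now holds a mix of a string, a tuple and a valueless macro;
# $_CPPDEFFLAGS should expand to something like: -DRELEASE -DVERSION=3 -DVERBOSE
print(env.subst('$_CPPDEFFLAGS'))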
+ + + @@ -2658,6 +2701,16 @@ + + + FILE_ENCODING + + +File encoding used for files written by &b-link-Textfile; and &b-link-Substfile;. Set to "utf-8" by default. +Added in version 4.5.0. + + + FORTRAN @@ -2689,8 +2742,9 @@ General user-specified options that are passed to the Fortran compiler. Similar to &cv-link-FORTRANFLAGS;, -but this variable is applied to all dialects. +but this &consvar; is applied to all dialects. +New in version 4.4. @@ -2729,9 +2783,8 @@ (or similar) include or module search path options that scons generates automatically from &cv-link-FORTRANPATH;. See -&cv-link-_FORTRANINCFLAGS; and &cv-link-_FORTRANMODFLAG;, -below, -for the variables that expand those options. +&cv-link-_FORTRANINCFLAGS; and &cv-link-_FORTRANMODFLAG; +for the &consvars; that expand those options. @@ -3225,14 +3278,13 @@ IMPLICIT_COMMAND_DEPENDENCIES -Controls whether or not SCons will +Controls whether or not &SCons; will add implicit dependencies for the commands executed to build targets. -By default, SCons will add -to each target +By default, &SCons; will add to each target an implicit dependency on the command represented by the first argument of any command line it executes (which is typically @@ -3476,16 +3528,30 @@ JAVABOOTCLASSPATH - Specifies the list of directories that - will be added to the - &javac; command line - via the option. - The individual directory names will be - separated by the operating system's path separate character - (: on UNIX/Linux/POSIX, - ; - on Windows). + Specifies the location of the bootstrap class files. + Can be specified as a string or Node object, + or as a list of strings or Node objects. + + The value will be added to the JDK command lines + via the option, + which requires a system-specific search path separator. + This will be supplied by &SCons; as needed when it + constructs the command line if &cv-JAVABOOTCLASSPATH; is + provided in list form. + If &cv-JAVABOOTCLASSPATH; is a single string containing + search path separator characters + (: for POSIX systems or + ; for Windows), it will not be modified; + and so is inherently system-specific; + to supply the path in a system-independent manner, + give &cv-JAVABOOTCLASSPATH; as a list of paths instead. + + + + Can only be used when compiling for releases prior to JDK 9. + + @@ -3541,10 +3607,9 @@ The directory in which Java class files may be found. - This is stripped from the beginning of any Java .class - file names supplied to the - JavaH - builder. + This is stripped from the beginning of any Java + .class + file names supplied to the &b-link-JavaH; builder. @@ -3553,19 +3618,54 @@ JAVACLASSPATH - Specifies the list of directories that - will be searched for Java - .class - file. - The directories in this list will be added to the - &javac; and &javah; command lines - via the option. - The individual directory names will be - separated by the operating system's path separate character - (: on UNIX/Linux/POSIX, - ; - on Windows). + Specifies the class search path for the JDK tools. + Can be specified as a string or Node object, + or as a list of strings or Node objects. + Class path entries may be directory names to search + for class files or packages, pathnames to archives + (.jar or .zip) + containing classes, or paths ending in a "base name wildcard" + character (*), which matches files + in that directory with a .jar suffix. + See the Java documentation for more details. 
+ + The value will be added to the JDK command lines + via the option, + which requires a system-specific search path separator. + This will be supplied by &SCons; as needed when it + constructs the command line if &cv-JAVACLASSPATH; is + provided in list form. + If &cv-JAVACLASSPATH; is a single string containing + search path separator characters + (: for POSIX systems or + ; for Windows), + it will be split on the separator into a list of individual + paths for dependency scanning purposes. + It will not be modified for JDK command-line usage, + so such a string is inherently system-specific; + to supply the path in a system-independent manner, + give &cv-JAVACLASSPATH; as a list of paths instead. + + + + &SCons; always + supplies a + when invoking the Java compiler &javac;, + regardless of the setting of &cv-link-JAVASOURCEPATH;, + as it passes the path(s) to the source(s) supplied + in the call to the &b-link-Java; builder via + . + From the documentation of the standard Java toolkit for &javac;: + If not compiling code for modules, if the + or + option is not specified, then the user class path is also + searched for source files. + Since is always supplied, + &javac; will not use the contents of the value of + &cv-JAVACLASSPATH; when searching for sources. + + @@ -3630,7 +3730,37 @@ JAVAINCLUDES - Include path for Java header files (such as jni.h) + Include path for Java header files + (such as jni.h). + + + + + + JAVAPROCESSORPATH + + + Specifies the location of the annotation processor class files. + Can be specified as a string or Node object, + or as a list of strings or Node objects. + + + The value will be added to the JDK command lines + via the option, + which requires a system-specific search path separator. + This will be supplied by &SCons; as needed when it + constructs the command line if &cv-JAVAPROCESSORPATH; is + provided in list form. + If &cv-JAVAPROCESSORPATH; is a single string containing + search path separator characters + (: for POSIX systems or + ; for Windows), it will not be modified; + and so is inherently system-specific; + to supply the path in a system-independent manner, + give &cv-JAVAPROCESSORPATH; as a list of paths instead. + + + New in version 4.5.0 @@ -3640,24 +3770,31 @@ Specifies the list of directories that - will be searched for input - .java - file. - The directories in this list will be added to the - &javac; command line - via the option. - The individual directory names will be - separated by the operating system's path separate character - (: on UNIX/Linux/POSIX, - ; - on Windows). + will be searched for input (source) + .java files. + Can be specified as a string or Node object, + or as a list of strings or Node objects. + + + The value will be added to the JDK command lines + via the option, + which requires a system-specific search path separator, + This will be supplied by &SCons; as needed when it + constructs the command line if &cv-JAVASOURCEPATH; is + provided in list form. + If &cv-JAVASOURCEPATH; is a single string containing + search path separator characters + (: for POSIX systems or + ; for Windows), it will not be modified, + and so is inherently system-specific; + to supply the path in a system-independent manner, + give &cv-JAVASOURCEPATH; as a list of paths instead. - - Note that this currently just adds the specified - directory via the option. + Note that the specified directories are only added to + the command line via the option. 
&SCons; does not currently search the - &cv-JAVASOURCEPATH; directories for dependency + &cv-JAVASOURCEPATH; directories for dependent .java files. @@ -4720,11 +4857,11 @@ when calling the Microsoft Visual C/C++ compiler. All compilations of source files from the same source directory that generate target files in a same output directory -and were configured in SCons using the same construction environment +and were configured in SCons using the same &consenv; will be built in a single call to the compiler. Only source files that have changed since their object files were built will be passed to each compiler invocation -(via the &cv-link-CHANGED_SOURCES; construction variable). +(via the &cv-link-CHANGED_SOURCES; &consvar;). Any compilations where the object (target) file base name (minus the .obj) does not match the source file base name @@ -4839,6 +4976,8 @@ subject to the conditions listed above. The default &scons; behavior may change in the future. +New in version 4.4 + @@ -4851,8 +4990,8 @@ &cv-MSVC_SCRIPT_ARGS; is available for msvc batch file arguments that do not have first-class support -via construction variables or when there is an issue with the appropriate construction variable validation. -When available, it is recommended to use the appropriate construction variables (e.g., &cv-link-MSVC_TOOLSET_VERSION;) +via &consvars; or when there is an issue with the appropriate &consvar; validation. +When available, it is recommended to use the appropriate &consvars; (e.g., &cv-link-MSVC_TOOLSET_VERSION;) rather than &cv-MSVC_SCRIPT_ARGS; arguments. @@ -4976,6 +5115,8 @@ +New in version 4.4 + @@ -5028,6 +5169,9 @@ Suppress msvc batch file error messages. + +New in version 4.4 + @@ -5102,6 +5246,8 @@ +New in version 4.4 + @@ -5221,6 +5367,8 @@ +New in version 4.4 + @@ -5302,6 +5450,8 @@ +New in version 4.4 + @@ -5473,6 +5623,8 @@ +New in version 4.4 + @@ -5500,7 +5652,7 @@ Setting &cv-MSVC_USE_SCRIPT; to None bypasses the Visual Studio autodetection entirely; -use this if you are running SCons in a Visual Studio cmd +use this if you are running &SCons; in a Visual Studio cmd window and importing the shell's environment variables - that is, if you are sure everything is set correctly already and you don't want &SCons; to change anything. @@ -5508,6 +5660,12 @@ &cv-MSVC_USE_SCRIPT; ignores &cv-link-MSVC_VERSION; and &cv-link-TARGET_ARCH;. + +Changed in version 4.4: +new &cv-link-MSVC_USE_SCRIPT_ARGS; provides a +way to pass arguments. + + @@ -5517,6 +5675,9 @@ Provides arguments passed to the script &cv-link-MSVC_USE_SCRIPT;. + +New in version 4.4 + @@ -5598,11 +5759,15 @@ The burden is on the user to ensure the dictionary contents are minimally sufficient to ensure successful builds. - + + + +New in version 4.4 + @@ -5683,12 +5848,10 @@ Sets the preferred version of Microsoft Visual C/C++ to use. - - - +If the specified version is unavailable (not installed, +or not discoverable), tool initialization will fail. If &cv-MSVC_VERSION; is not set, SCons will (by default) select the -latest version of Visual C/C++ installed on your system. If the -specified version isn't installed, tool initialization will fail. +latest version of Visual C/C++ installed on your system. @@ -5701,28 +5864,186 @@ -Valid values for Windows are -14.3, -14.2, -14.1, -14.1Exp, -14.0, -14.0Exp, -12.0, -12.0Exp, -11.0, -11.0Exp, -10.0, -10.0Exp, -9.0, -9.0Exp, -8.0, -8.0Exp, -7.1, -7.0, -and 6.0. -Versions ending in Exp refer to "Express" or -"Express for Desktop" editions. 
+The valid values for &cv-MSVC_VERSION; represent major versions +of the compiler, except that versions ending in Exp +refer to "Express" or "Express for Desktop" Visual Studio editions, +which require distinct entries because they use a different +filesystem layout and have some feature limitations compared to +the full version. +The following table shows the correspondence +of the selector string to various version indicators +('x' is used as a placeholder for +a single digit that can vary). +Note that it is not necessary to install Visual Studio +to build with &SCons; (for example, you can install only +Build Tools), but if Visual Studio is installed, +additional builders such as &b-link-MSVSSolution; and +&b-link-MSVSProject; become available and will +correspond to the indicated versions. + + + + + + + + + + + + SCons Key + MSVC++ Version + _MSVC_VER + VS Product + MSBuild/VS Version + + + + + 14.3 + 14.3x + 193x + Visual Studio 2022 + 17.x + + + 14.2 + 14.2x + 192x + Visual Studio 2019 + 16.x, 16.1x + + + 14.1 + 14.1 or 14.1x + 191x + Visual Studio 2017 + 15.x + + + 14.1Exp + 14.1 + 1910 + Visual Studio 2017 Express + 15.0 + + + 14.0 + 14.0 + 1900 + Visual Studio 2015 + 14.0 + + + 14.0Exp + 14.0 + 1900 + Visual Studio 2015 Express + 14.0 + + + 12.0 + 12.0 + 1800 + Visual Studio 2013 + 12.0 + + + 12.0Exp + 12.0 + 1800 + Visual Studio 2013 Express + 12.0 + + + 11.0 + 11.0 + 1700 + Visual Studio 2012 + 11.0 + + + 11.0Exp + 11.0 + 1700 + Visual Studio 2012 Express + 11.0 + + + 10.0 + 10.0 + 1600 + Visual Studio 2010 + 10.0 + + + 10.0Exp + 10.0 + 1600 + Visual C++ Express 2010 + 10.0 + + + 9.0 + 9.0 + 1500 + Visual Studio 2008 + 9.0 + + + 9.0Exp + 9.0 + 1500 + Visual C++ Express 2008 + 9.0 + + + 8.0 + 8.0 + 1400 + Visual Studio 2005 + 8.0 + + + 8.0Exp + 8.0 + 1400 + Visual C++ Express 2005 + 8.0 + + + 7.1 + 7.1 + 1300 + Visual Studio .NET 2003 + 7.1 + + + 7.0 + 7.0 + 1200 + Visual Studio .NET 2002 + 7.0 + + + 6.0 + 6.0 + 1100 + Visual Studio 6.0 + 6.0 + + + + + + +The compilation environment can be further or more precisely specified through the +use of several other &consvars;: see the descriptions of +&cv-link-MSVC_TOOLSET_VERSION;, +&cv-link-MSVC_SDK_VERSION;, +&cv-link-MSVC_USE_SCRIPT;, +&cv-link-MSVC_USE_SCRIPT_ARGS;, +and &cv-link-MSVC_USE_SETTINGS;. @@ -5737,28 +6058,39 @@ - VERSION + VERSION + the version of MSVS being used (can be set via - &cv-link-MSVS_VERSION;) + &cv-link-MSVC_VERSION;) - - VERSIONS + + + VERSIONS + the available versions of MSVS installed - - VCINSTALLDIR + + + VCINSTALLDIR + installed directory of Visual C++ - - VSINSTALLDIR + + + VSINSTALLDIR + installed directory of Visual Studio - - FRAMEWORKDIR + + + FRAMEWORKDIR + installed directory of the .NET framework - - FRAMEWORKVERSIONS + + + FRAMEWORKVERSIONS + list of installed versions of the .NET framework, sorted latest to oldest. @@ -5795,7 +6127,12 @@ - If a value is not set, it was not available in the registry. + If a value is not set, it was not available in the registry. + Visual Studio 2017 and later do not use the registry for + primary storage of this information, so typically for these + versions only PROJECTSUFFIX and + SOLUTIONSUFFIX will be set. + @@ -5821,7 +6158,7 @@ The string placed in a generated -Microsoft Visual Studio project file as the value of the +Microsoft Visual C++ project file as the value of the ProjectGUID attribute. There is no default value. If not defined, a new GUID is generated.
@@ -5835,9 +6172,9 @@ The path name placed in a generated -Microsoft Visual Studio project file as the value of the +Microsoft Visual C++ project file as the value of the SccAuxPath attribute if the - MSVS_SCC_PROVIDER construction variable is + MSVS_SCC_PROVIDER &consvar; is also set. There is no default value. @@ -5852,7 +6189,7 @@ The root path of projects in your SCC workspace, i.e. the path under which all project and solution files will be generated. It is used as a reference path from which the - relative paths of the generated Microsoft Visual Studio project + relative paths of the generated Microsoft Visual C++ project and solution files are computed. The relative project file path is placed as the value of the SccLocalPath attribute of the project file and as the values of the SccProjectFilePathRelativizedFromConnection[i] (where [i] ranges from 0 to the number of projects in the solution) attributes of the GlobalSection(SourceCodeControl) section of the Microsoft Visual Studio solution file. This is used only if - the MSVS_SCC_PROVIDER construction variable is + the MSVS_SCC_PROVIDER &consvar; is also set. The default value is the current working directory. @@ -5876,9 +6213,9 @@ The project name placed in a generated Microsoft - Visual Studio project file as the value of the + Visual C++ project file as the value of the SccProjectName attribute if the - MSVS_SCC_PROVIDER construction variable + MSVS_SCC_PROVIDER &consvar; is also set. In this case the string is also placed in the SccProjectName0 attribute of the GlobalSection(SourceCodeControl) section @@ -5893,7 +6230,7 @@ The string placed in a generated Microsoft - Visual Studio project file as the value of the + Visual C++ project file as the value of the SccProvider attribute. The string is also placed in the SccProvider0 attribute of the GlobalSection(SourceCodeControl) @@ -5906,23 +6243,25 @@ MSVS_VERSION - Sets the preferred version of Microsoft Visual Studio to use. + Set the preferred version of Microsoft Visual Studio to use. If &cv-MSVS_VERSION; is not set, &SCons; will (by default) select the latest version of Visual Studio installed on your system. So, if you have version 6 and version 7 (MSVS .NET) installed, it will prefer version 7. You can override this by - specifying the MSVS_VERSION variable in the - Environment initialization, setting it to the appropriate + specifying the &cv-link-MSVS_VERSION; variable when + initializing the Environment, setting it to the appropriate version ('6.0' or '7.0', for example). If the specified version isn't installed, tool initialization will fail. - This is obsolete: use &cv-MSVC_VERSION; instead. If - &cv-MSVS_VERSION; is set and &cv-MSVC_VERSION; is - not, &cv-MSVC_VERSION; will be set automatically to - &cv-MSVS_VERSION;. If both are set to different values, - scons will raise an error. + Deprecated since 1.3.0: + &cv-MSVS_VERSION; is deprecated in favor of &cv-link-MSVC_VERSION;. + As a transitional aid, if &cv-MSVS_VERSION; is set + and &cv-MSVC_VERSION; is not, + &cv-MSVC_VERSION; will be initialized to the value + of &cv-MSVS_VERSION;. + An error is raised if both are set and have different values. @@ -5932,8 +6271,8 @@ The build command line placed in a generated Microsoft Visual - Studio project file. The default is to have Visual Studio - invoke SCons with any specified build targets. + C++ project file. The default is to have Visual Studio + invoke &SCons; with any specified build targets. @@ -5943,9 +6282,9 @@ The clean command line placed in a generated Microsoft Visual - Studio project file.
The default is to have Visual Studio - invoke SCons with the -c option to remove any specified - targets. + C++ project file. The default is to have Visual Studio + invoke &SCons; with the option to remove + any specified targets. @@ -5955,7 +6294,7 @@ The encoding string placed in a generated Microsoft - Visual Studio project file. The default is encoding + Visual C++ project file. The default is encoding Windows-1252. @@ -5964,7 +6303,7 @@ MSVSPROJECTCOM - The action used to generate Microsoft Visual Studio project files. + The action used to generate Microsoft Visual C++ project files. @@ -5972,11 +6311,12 @@ MSVSPROJECTSUFFIX - The suffix used for Microsoft Visual Studio project (DSP) - files. The default value is .vcproj - when using Visual Studio version 7.x (.NET) or later version, - and .dsp when using earlier versions of - Visual Studio. + The suffix used for Microsoft Visual C++ project (DSP) + files. The default value is + .vcxproj when using Visual Studio 2010 + and later, .vcproj + when using Visual Studio versions between 2002 and 2008, + and .dsp when using Visual Studio 6.0. @@ -5986,8 +6326,8 @@ The rebuild command line placed in a generated Microsoft - Visual Studio project file. The default is to have Visual - Studio invoke SCons with any specified rebuild targets. + Visual C++ project file. The default is to have Visual + Studio invoke &SCons; with any specified rebuild targets. @@ -5997,8 +6337,8 @@ MSVSSCONS - The SCons used in generated Microsoft Visual Studio project - files. The default is the version of SCons being used to + The &SCons; used in generated Microsoft Visual C++ project + files. The default is the version of &SCons; being used to generate the project file. @@ -6008,8 +6348,8 @@ MSVSSCONSCOM - The default SCons command used in generated Microsoft Visual - Studio project files. + The default &SCons; command used in generated Microsoft Visual + C++ project files. @@ -6019,10 +6359,10 @@ The sconscript file (that is, &SConstruct; or &SConscript; - file) that will be invoked by Visual Studio project files + file) that will be invoked by Visual C++ project files (through the &cv-link-MSVSSCONSCOM; variable). The default is the same sconscript file that contains the call to - &b-MSVSProject; to build the project file. + &b-link-MSVSProject; to build the project file. @@ -6031,7 +6371,7 @@ MSVSSCONSFLAGS - The SCons flags used in generated Microsoft Visual Studio project files. + The &SCons; flags used in generated Microsoft Visual C++ project files. @@ -6049,9 +6389,8 @@ The suffix used for Microsoft Visual Studio solution (DSW) files. The default value is .sln - when using Visual Studio version 7.x (.NET), and - .dsw when using earlier versions of - Visual Studio. + when using Visual Studio version 7.x (.NET 2002) and later, + and .dsw when using Visual Studio 6.0. @@ -6475,9 +6814,9 @@ PCHPDBFLAGS -A construction variable that, when expanded, +A &consvar; that, when expanded, adds the flag to the command line -only if the &cv-link-PDB; construction variable is set. +only if the &cv-link-PDB; &consvar; is set. @@ -6909,51 +7248,63 @@ - + - QT_AUTOSCAN + QT3_AUTOSCAN Turn off scanning for mocable files. Use the &b-link-Moc; Builder to explicitly specify files to run moc on. + +Changed in 4.5.0: renamed from QT_AUTOSCAN. + - + - QT_BINPATH + QT3_BINPATH The path where the Qt binaries are installed. -The default value is '&cv-link-QTDIR;/bin'. +The default value is '&cv-link-QT3DIR;/bin'. + + +Changed in 4.5.0: renamed from QT_BINPATH. 
- + - QT_CPPPATH + QT3_CPPPATH The path where the Qt header files are installed. -The default value is '&cv-link-QTDIR;/include'. +The default value is '&cv-link-QT3DIR;/include'. Note: If you set this variable to None, the tool won't change the &cv-link-CPPPATH; construction variable. + +Changed in 4.5.0: renamed from QT_CPPPATH. + - + - QT_DEBUG + QT3_DEBUG Prints lots of debugging information while scanning for moc files. + +Changed in 4.5.0: renamed from QT_DEBUG. + - + - QT_LIB + QT3_LIB Default value is 'qt'. @@ -6961,33 +7312,39 @@ Note: If you set this variable to None, the tool won't change the &cv-link-LIBS; variable. + +Changed in 4.5.0: renamed from QT_LIB. + - + - QT_LIBPATH + QT3_LIBPATH The path where the Qt libraries are installed. -The default value is '&cv-link-QTDIR;/lib'. +The default value is '&cv-link-QT3DIR;/lib'. Note: If you set this variable to None, the tool won't change the &cv-link-LIBPATH; construction variable. + +Changed in 4.5.0: renamed from QT_LIBPATH. + - + - QT_MOC + QT3_MOC -Default value is '&cv-link-QT_BINPATH;/moc'. +Default value is '&cv-link-QT3_BINPATH;/moc'. - + - QT_MOCCXXPREFIX + QT3_MOCCXXPREFIX Default value is ''. @@ -6995,204 +7352,264 @@ - + - QT_MOCCXXSUFFIX + QT3_MOCCXXSUFFIX Default value is '.moc'. Suffix for moc output files when source is a C++ file. + +Changed in 4.5.0: renamed from QT_MOCCXXSUFFIX. + - + - QT_MOCFROMCXXCOM + QT3_MOCFROMCXXCOM Command to generate a moc file from a C++ file. + +Changed in 4.5.0: renamed from QT_MOCFROMCXXCOM. + - + - QT_MOCFROMCXXCOMSTR + QT3_MOCFROMCXXCOMSTR The string displayed when generating a moc file from a C++ file. -If this is not set, then &cv-link-QT_MOCFROMCXXCOM; (the command line) is displayed. +If this is not set, then &cv-link-QT3_MOCFROMCXXCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_MOCFROMCXXCOMSTR. - + - QT_MOCFROMCXXFLAGS + QT3_MOCFROMCXXFLAGS Default value is '-i'. These flags are passed to moc when moccing a C++ file. + +Changed in 4.5.0: renamed from QT_MOCFROMCXXFLAGS. + - + - QT_MOCFROMHCOM + QT3_MOCFROMHCOM Command to generate a moc file from a header. + +Changed in 4.5.0: renamed from QT_MOCFROMSHCOM. + - + - QT_MOCFROMHCOMSTR + QT3_MOCFROMHCOMSTR The string displayed when generating a moc file from a C++ file. -If this is not set, then &cv-link-QT_MOCFROMHCOM; (the command line) is displayed. +If this is not set, then &cv-link-QT3_MOCFROMHCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_MOCFROMSHCOMSTR. - + - QT_MOCFROMHFLAGS + QT3_MOCFROMHFLAGS Default value is ''. These flags are passed to moc when moccing a header file. + +Changed in 4.5.0: renamed from QT_MOCFROMSHFLAGS. + - + - QT_MOCHPREFIX + QT3_MOCHPREFIX Default value is 'moc_'. Prefix for moc output files when source is a header. + +Changed in 4.5.0: renamed from QT_MOCHPREFIX. + - + - QT_MOCHSUFFIX + QT3_MOCHSUFFIX Default value is '&cv-link-CXXFILESUFFIX;'. Suffix for moc output files when source is a header. + +Changed in 4.5.0: renamed from QT_MOCHSUFFIX. + - + - QT_UIC + QT3_UIC -Default value is '&cv-link-QT_BINPATH;/uic'. +Default value is '&cv-link-QT3_BINPATH;/uic'. + + +Changed in 4.5.0: renamed from QT_UIC. - + - QT_UICCOM + QT3_UICCOM Command to generate header files from .ui files. + +Changed in 4.5.0: renamed from QT_UICCOM. + - + - QT_UICCOMSTR + QT3_UICCOMSTR The string displayed when generating header files from .ui files. -If this is not set, then &cv-link-QT_UICCOM; (the command line) is displayed. 
+If this is not set, then &cv-link-QT3_UICCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_UICCOMSTR. - + - QT_UICDECLFLAGS + QT3_UICDECLFLAGS Default value is ''. These flags are passed to uic when creating a header file from a .ui file. + +Changed in 4.5.0: renamed from QT_UICDECLFLAGS. + - + - QT_UICDECLPREFIX + QT3_UICDECLPREFIX Default value is ''. Prefix for uic generated header files. + +Changed in 4.5.0: renamed from QT_UICDECLPREFIX. + - + - QT_UICDECLSUFFIX + QT3_UICDECLSUFFIX Default value is '.h'. Suffix for uic generated header files. + +Changed in 4.5.0: renamed from QT_UICDECLSUFFIX. + - + - QT_UICIMPLFLAGS + QT3_UICIMPLFLAGS Default value is ''. These flags are passed to uic when creating a C++ file from a .ui file. + +Changed in 4.5.0: renamed from QT_UICIMPFLAGS. + - + - QT_UICIMPLPREFIX + QT3_UICIMPLPREFIX Default value is 'uic_'. Prefix for uic generated implementation files. + +Changed in 4.5.0: renamed from QT_UICIMPLPREFIX. + - + - QT_UICIMPLSUFFIX + QT3_UICIMPLSUFFIX Default value is '&cv-link-CXXFILESUFFIX;'. Suffix for uic generated implementation files. + +Changed in 4.5.0: renamed from QT_UICIMPLSUFFIX. + - + - QT_UISUFFIX + QT3_UISUFFIX Default value is '.ui'. Suffix of designer input files. + +Changed in 4.5.0: renamed from QT_UISUFFIX. + - + - QTDIR + QT3DIR The path to the Qt installation to build against. If not already set, -&t-link-qt; tool tries to obtain this from +&t-link-qt3; tool tries to obtain this from os.environ; if not found there, it tries to make a guess. + +Changed in 4.5.0: renamed from QTDIR. + @@ -7281,7 +7698,7 @@ RCINCFLAGS -An automatically-generated construction variable +An automatically-generated &consvar; containing the command-line options for specifying directories to be searched by the resource compiler. @@ -7301,7 +7718,7 @@ The prefix (flag) used to specify an include directory on the resource compiler command line. This will be prepended to the beginning of each directory -in the &cv-link-CPPPATH; construction variable +in the &cv-link-CPPPATH; &consvar; when the &cv-link-RCINCFLAGS; variable is expanded. @@ -7314,7 +7731,7 @@ The suffix used to specify an include directory on the resource compiler command line. This will be appended to the end of each directory -in the &cv-link-CPPPATH; construction variable +in the &cv-link-CPPPATH; &consvar; when the &cv-link-RCINCFLAGS; variable is expanded. @@ -7411,7 +7828,9 @@ -env = Environment(RMICCOMSTR = "Generating stub/skeleton class files $TARGETS from $SOURCES") +env = Environment( + RMICCOMSTR="Generating stub/skeleton class files $TARGETS from $SOURCES" +) @@ -7574,11 +7993,11 @@ SCONS_HOME - The (optional) path to the SCons library directory, + The (optional) path to the &SCons; library directory, initialized from the external environment. If set, this is used to construct a shorter and more efficient search path in the &cv-link-MSVSSCONS; command line executed from Microsoft - Visual Studio project files. + Visual C++ project files. @@ -7790,21 +8209,39 @@ SHELL_ENV_GENERATORS -Must be a list (or an iterable) containing functions where each function generates or -alters the environment dictionary which will be used -when executing the &cv-link-SPAWN; function. The functions will initially -be passed a reference of the current execution environment (e.g. env['ENV']), -and each called while iterating the list. Each function must return a dictionary -which will then be passed to the next function iterated. 
The return dictionary -should contain keys which represent the environment variables and their respective -values. - -This primary purpose of this construction variable is to give the user the ability -to substitute execution environment variables based on env, targets, and sources. -If desired, the user can completely customize the execution environment for particular -targets. +A hook allowing the execution environment to be modified prior +to the actual execution of a command line from an action +via the spawner function defined by &cv-link-SPAWN;. +Allows substitution based on targets and sources, +as well as values from the &consenv;, +adding extra environment variables, etc. + +The value must be a list (or other iterable) +of functions which each generate or +alter the execution environment dictionary. +The first function will be passed a copy of the initial execution environment +(&cv-link-ENV; in the current &consenv;); +the dictionary returned by that function is passed to the next, +until the iterable is exhausted and the result returned +for use by the command spawner. +The original execution environment is not modified. + + + +Each function provided in &cv-SHELL_ENV_GENERATORS; must accept four +arguments and return a dictionary: +env is the &consenv; for this action; +target is the list of targets associated with this action; +source is the list of sources associated with this action; +and shell_env is the current dictionary after iterating +any previous &cv-SHELL_ENV_GENERATORS; functions +(this can be compared to the original execution environment, +which is available as env['ENV'], to detect any changes). + + + Example: def custom_shell_env(env, target, source, shell_env): """customize shell_env if desired""" @@ -7815,24 +8252,7 @@ env["SHELL_ENV_GENERATORS"] = [custom_shell_env] - - env -The SCons construction environment from which the -execution environment can be derived from. - - - target -The list of targets associated with this action. - - - source -The list of sources associated with this action. - - - shell_env -The current shell_env after iterating other SHELL_ENV_GENERATORS functions. This can be compared -to the passed env['ENV'] to detect any changes. 
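A slightly fuller sketch of the generator hook documented above, assuming an illustrative BUILD_ID construction variable: each generator receives env, target, source and the working dictionary, and returns the dictionary that is handed to the next generator (or used to spawn the command).

def add_build_id(env, target, source, shell_env):
    # work on a copy so the original execution environment (env['ENV']) is untouched
    shell_env = dict(shell_env)
    shell_env["BUILD_ID"] = env.get("BUILD_ID", "dev")  # illustrative variable
    return shell_env

env = Environment(BUILD_ID="nightly")
env["SHELL_ENV_GENERATORS"] = [add_build_id]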
- + Available since 4.4 diff -Nru scons-4.4.0+dfsg/doc/generated/variables.mod scons-4.5.2+dfsg/doc/generated/variables.mod --- scons-4.4.0+dfsg/doc/generated/variables.mod 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/generated/variables.mod 2023-03-21 16:17:04.000000000 +0000 @@ -184,6 +184,7 @@ $F95PPCOMSTR"> $F95PPFILESUFFIXES"> $File"> +$FILE_ENCODING"> $FORTRAN"> $FORTRANCOM"> $FORTRANCOMMONFLAGS"> @@ -245,6 +246,7 @@ $JAVAHCOMSTR"> $JAVAHFLAGS"> $JAVAINCLUDES"> +$JAVAPROCESSORPATH"> $JAVASOURCEPATH"> $JAVASUFFIX"> $JAVAVERSION"> @@ -419,34 +421,34 @@ $PSCOMSTR"> $PSPREFIX"> $PSSUFFIX"> -$QT_AUTOSCAN"> -$QT_BINPATH"> -$QT_CPPPATH"> -$QT_DEBUG"> -$QT_LIB"> -$QT_LIBPATH"> -$QT_MOC"> -$QT_MOCCXXPREFIX"> -$QT_MOCCXXSUFFIX"> -$QT_MOCFROMCXXCOM"> -$QT_MOCFROMCXXCOMSTR"> -$QT_MOCFROMCXXFLAGS"> -$QT_MOCFROMHCOM"> -$QT_MOCFROMHCOMSTR"> -$QT_MOCFROMHFLAGS"> -$QT_MOCHPREFIX"> -$QT_MOCHSUFFIX"> -$QT_UIC"> -$QT_UICCOM"> -$QT_UICCOMSTR"> -$QT_UICDECLFLAGS"> -$QT_UICDECLPREFIX"> -$QT_UICDECLSUFFIX"> -$QT_UICIMPLFLAGS"> -$QT_UICIMPLPREFIX"> -$QT_UICIMPLSUFFIX"> -$QT_UISUFFIX"> -$QTDIR"> +$QT3_AUTOSCAN"> +$QT3_BINPATH"> +$QT3_CPPPATH"> +$QT3_DEBUG"> +$QT3_LIB"> +$QT3_LIBPATH"> +$QT3_MOC"> +$QT3_MOCCXXPREFIX"> +$QT3_MOCCXXSUFFIX"> +$QT3_MOCFROMCXXCOM"> +$QT3_MOCFROMCXXCOMSTR"> +$QT3_MOCFROMCXXFLAGS"> +$QT3_MOCFROMHCOM"> +$QT3_MOCFROMHCOMSTR"> +$QT3_MOCFROMHFLAGS"> +$QT3_MOCHPREFIX"> +$QT3_MOCHSUFFIX"> +$QT3_UIC"> +$QT3_UICCOM"> +$QT3_UICCOMSTR"> +$QT3_UICDECLFLAGS"> +$QT3_UICDECLPREFIX"> +$QT3_UICDECLSUFFIX"> +$QT3_UICIMPLFLAGS"> +$QT3_UICIMPLPREFIX"> +$QT3_UICIMPLSUFFIX"> +$QT3_UISUFFIX"> +$QT3DIR"> $RANLIB"> $RANLIBCOM"> $RANLIBCOMSTR"> @@ -858,6 +860,7 @@ $F95PPCOMSTR"> $F95PPFILESUFFIXES"> $File"> +$FILE_ENCODING"> $FORTRAN"> $FORTRANCOM"> $FORTRANCOMMONFLAGS"> @@ -919,6 +922,7 @@ $JAVAHCOMSTR"> $JAVAHFLAGS"> $JAVAINCLUDES"> +$JAVAPROCESSORPATH"> $JAVASOURCEPATH"> $JAVASUFFIX"> $JAVAVERSION"> @@ -1093,34 +1097,34 @@ $PSCOMSTR"> $PSPREFIX"> $PSSUFFIX"> -$QT_AUTOSCAN"> -$QT_BINPATH"> -$QT_CPPPATH"> -$QT_DEBUG"> -$QT_LIB"> -$QT_LIBPATH"> -$QT_MOC"> -$QT_MOCCXXPREFIX"> -$QT_MOCCXXSUFFIX"> -$QT_MOCFROMCXXCOM"> -$QT_MOCFROMCXXCOMSTR"> -$QT_MOCFROMCXXFLAGS"> -$QT_MOCFROMHCOM"> -$QT_MOCFROMHCOMSTR"> -$QT_MOCFROMHFLAGS"> -$QT_MOCHPREFIX"> -$QT_MOCHSUFFIX"> -$QT_UIC"> -$QT_UICCOM"> -$QT_UICCOMSTR"> -$QT_UICDECLFLAGS"> -$QT_UICDECLPREFIX"> -$QT_UICDECLSUFFIX"> -$QT_UICIMPLFLAGS"> -$QT_UICIMPLPREFIX"> -$QT_UICIMPLSUFFIX"> -$QT_UISUFFIX"> -$QTDIR"> +$QT3_AUTOSCAN"> +$QT3_BINPATH"> +$QT3_CPPPATH"> +$QT3_DEBUG"> +$QT3_LIB"> +$QT3_LIBPATH"> +$QT3_MOC"> +$QT3_MOCCXXPREFIX"> +$QT3_MOCCXXSUFFIX"> +$QT3_MOCFROMCXXCOM"> +$QT3_MOCFROMCXXCOMSTR"> +$QT3_MOCFROMCXXFLAGS"> +$QT3_MOCFROMHCOM"> +$QT3_MOCFROMHCOMSTR"> +$QT3_MOCFROMHFLAGS"> +$QT3_MOCHPREFIX"> +$QT3_MOCHSUFFIX"> +$QT3_UIC"> +$QT3_UICCOM"> +$QT3_UICCOMSTR"> +$QT3_UICDECLFLAGS"> +$QT3_UICDECLPREFIX"> +$QT3_UICDECLSUFFIX"> +$QT3_UICIMPLFLAGS"> +$QT3_UICIMPLPREFIX"> +$QT3_UICIMPLSUFFIX"> +$QT3_UISUFFIX"> +$QT3DIR"> $RANLIB"> $RANLIBCOM"> $RANLIBCOMSTR"> diff -Nru scons-4.4.0+dfsg/doc/man/scons.xml scons-4.5.2+dfsg/doc/man/scons.xml --- scons-4.4.0+dfsg/doc/man/scons.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/man/scons.xml 2023-03-21 16:17:04.000000000 +0000 @@ -106,15 +106,16 @@ details do not have to be recalculated each run. -&scons; requires Python 3.6 or later to run; -there should be no other dependencies or requirements. 
+&scons; requires &Python; 3.6 or later to run; +there should be no other dependencies or requirements, unless the experimental Ninja tool is used. - -Support for Python 3.5 is removed since -&SCons; 4.3.0. -The CPython project has retired 3.5: + + +Changed in version 4.3.0: +support for &Python; 3.5 is removed; +the CPython project has retired 3.5: . - + You set up an &SCons; build system by writing a script @@ -144,46 +145,64 @@ Information about files involved in the build, -including a cryptographic hash of the contents, +including a cryptographic hash of the contents of source files, is cached for later reuse. By default this hash (the &contentsig;) is used to determine if a file has changed since the last build, -but this can be controlled by selecting an appropriate +although this can be controlled by selecting an appropriate &f-link-Decider; function. Implicit dependency files are also part of out-of-date computation. The scanned implicit dependency information can optionally be cached and used to speed up future builds. -A hash of each executed build action (the &buildsig; +A hash of each executed build action (the &buildsig;) is cached, so that changes to build instructions (changing flags, etc.) or to the build tools themselves (new version) can also trigger a rebuild. +&SCons; supports the concept of separated source and build +directories through the definition of +variant directories +(see the &f-link-VariantDir; function). + + + When invoked, &scons; looks for a file named &SConstruct; in the current directory and reads the build configuration from that file (other names are allowed, -see for more information). -The &SConstruct; +see +and the option +for more information). +The build may be structured in a hierarchical manner: +the &SConstruct; file may specify subsidiary configuration files by calling the -&f-link-SConscript; function. +&f-link-SConscript; function, +and these may, in turn, do the same. By convention, these subsidiary files are named &SConscript;, although any name may be used. As a result of this naming convention, -the term SConscript files +the term &SConscript; files is used to refer generically to the complete set of configuration files for a project (including the &SConstruct; file), -regardless of the actual file names or number of such files. +regardless of the actual file names or number of such files. +A hierarchical build is not recursive - all of +the SConscript files are processed in a single pass, +although each is processed in a separate context so +as not to interfere with one another. &SCons; provides +mechanisms for information to be shared between +SConscript files when needed. + -Before reading the SConscript files, +Before reading the &SConscript; files, &scons; looks for a directory named site_scons @@ -193,9 +212,9 @@ directory from the option instead, and prepends the ones it -finds to the Python module search path (sys.path), +finds to the &Python; module search path (sys.path), thus allowing modules in such directories to be imported in -the normal Python way in SConscript files. +the normal &Python; way in &SConscript; files. For each found site directory, (1) if it contains a file site_init.py that file is evaluated, @@ -211,24 +230,24 @@ -SConscript files are written in the -Python programming language, -although it is normally not necessary to be a Python +&SConscript; files are written in the +&Python; programming language, +although it is normally not necessary to be a &Python; programmer to use &scons; effectively. 
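The hierarchical, non-recursive layout described above can be as small as two files; a minimal sketch (file and target names are illustrative) in which the top-level SConstruct shares its construction environment with one subsidiary SConscript:

# SConstruct
env = Environment()
Export("env")
SConscript("src/SConscript")

# src/SConscript -- file names here are relative to src/
Import("env")
env.Program("hello", ["hello.c"])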
-SConscript files are invoked in a context that makes +&SConscript; files are invoked in a context that makes the facilities described in this manual page available in their local namespace without any special steps. -Standard Python scripting capabilities -such as flow control, data manipulation, and imported Python libraries +Standard &Python; scripting capabilities +such as flow control, data manipulation, and imported &Python; libraries are available to use to handle complicated build situations. -Other Python files can be made a part of the build system, +Other &Python; files can be made a part of the build system, but they do not automatically have the &SCons; context and need to import it if they need access (described later). &scons; -reads and executes all of the included SConscript files +reads and executes all of the included &SConscript; files before it begins building any targets. To make this clear, @@ -338,7 +357,7 @@ Any variable argument assignments are collected, and remaining arguments are taken as targets to build. -Values of variables to be passed to the SConscript files +Values of variables to be passed to the &SConscript; files may be specified on the command line: @@ -347,7 +366,7 @@ These variables are available through the &ARGUMENTS; dictionary, -and can be used in the SConscript files to modify +and can be used in the &SConscript; files to modify the build in any way: @@ -373,7 +392,7 @@ &scons; can maintain a cache of target (derived) files that can be shared between multiple builds. When derived-file caching is enabled in an -SConscript file, any target files built by +&SConscript; file, any target files built by &scons; will be copied to the cache. If an up-to-date target file is found in the cache, it @@ -449,7 +468,7 @@ If no targets are specified on the command line, &scons; will select those targets -specified in the SConscript files via calls +specified in the &SConscript; files via calls to the &f-link-Default; function. These are known as the default targets, and are made available in the @@ -460,11 +479,11 @@ If no targets are selected by the previous steps, &scons; selects the current directory for scanning, -unless command-line options which affect the target -scan are detected -(, +unless command-line options which affect the directory +for target scanning are present +(, , -, +, ). Since targets thus selected were not the result of user instructions, this target list is not made available @@ -521,7 +540,7 @@ or by changing directory and invoking scons with the - + option, which traverses up the directory hierarchy until it finds the &SConstruct; @@ -542,7 +561,7 @@ requested, as &scons; needs to make sure any dependent files are built. -Specifying "cleanup" targets in SConscript files is +Specifying "cleanup" targets in &SConscript; files is usually not necessary. The @@ -564,7 +583,7 @@ Additional files or directories to remove can be specified using the -&f-link-Clean; function in the SConscript files. +&f-link-Clean; function in the &SConscript; files. 
Conversely, targets that would normally be removed by the invocation can be retained by calling the @@ -572,7 +591,7 @@ &scons; supports building multiple targets in parallel via a - + option that takes, as its argument, the number of simultaneous tasks that may be spawned: @@ -627,10 +646,10 @@ While clean mode removes targets rather than building them, -work which is done directly in Python code in SConscript files +work which is done directly in &Python; code in &SConscript; files will still be carried out. If it is important to avoid some such work from taking place in clean mode, it should be protected. -An SConscript file can determine which mode +An &SConscript; file can determine which mode is active by querying &f-link-GetOption;, as in the call if GetOption("clean"): @@ -638,7 +657,7 @@ - - + Write debug information about derived-file caching to the specified @@ -699,7 +718,7 @@ - + When using a derived-file cache show the command @@ -770,7 +789,7 @@ - + , @@ -794,12 +813,12 @@ directory is interpreted relative to the preceding one. This option is similar to using -, +, but does not search for any of the predefined &SConstruct; names in the specified directory. See also options -, +, and @@ -818,7 +837,7 @@ Works exactly the same way as the - + option except for the way default targets are handled. When this option is used and no targets are specified on the command line, all default targets are built, whether or not they are below the current @@ -844,7 +863,7 @@ This may be useful in debugging parallel builds. Implies the option. -Available since &scons; 3.1. +New in version 3.1. @@ -853,9 +872,9 @@ Print how many objects are created of the various classes used internally by SCons -before and after reading the SConscript files +before and after reading the &SConscript; files and before and after building targets. -This is not supported when SCons is executed with the Python +This is not supported when SCons is executed with the &Python; (optimized) option or when the SCons modules @@ -869,9 +888,10 @@ duplicate -Print a line for each unlink/relink (or copy) of a variant file from -its source file. Includes debugging info for unlinking stale variant -files, as well as unlinking old targets before building them. +Print a line for each unlink/relink (or copy) of a file in +a variant directory from its source file. +Includes debugging info for unlinking stale variant directory files, +as well as unlinking old targets before building them. @@ -923,7 +943,7 @@ memory Prints how much memory SCons uses -before and after reading the SConscript files +before and after reading the &SConscript; files and before and after building targets. @@ -940,8 +960,8 @@ pdb Re-run &scons; under the control of the -pdb -Python debugger. +pdb +&Python; debugger. @@ -980,7 +1000,7 @@ stacktrace -Prints an internal Python stack trace +Prints an internal &Python; stack trace when encountering an otherwise unexplained error. 
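Tying together the clean-mode notes above, a hedged sketch (target and directory names are illustrative) that registers extra cleanup, protects one target from cleaning, and guards read-time Python work when scons -c is active:

env = Environment()
prog = env.Program("hello", ["hello.c"])
report = env.Command("report.txt", prog, "echo built > $TARGET")

Clean(report, "scratch-dir")   # also remove this directory on 'scons -c'
NoClean(prog)                  # keep the program even when cleaning

if not GetOption("clean"):
    # read-time work that should not run in clean mode
    print("configuring build...")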
@@ -997,11 +1017,11 @@ The total build time (time SCons ran from beginning to end) -The total time spent reading and executing SConscript files +The total time spent reading and executing &SConscript; files The total time SCons itself spent running -(that is, not counting reading and executing SConscript files) +(that is, not counting reading and executing &SConscript; files) The total time spent executing all build commands @@ -1009,7 +1029,7 @@ The elapsed wall-clock time spent executing those build commands -The time spent processing each file passed to the &SConscriptFunc; function +The time spent processing each file passed to the &f-link-SConscript; function @@ -1125,15 +1145,18 @@ the special tokens all or none. A comma-separated string can be used to select multiple features. The default setting is none. - Current available features are: ninja. + Current available features are: + ninja (added in version 4.2), + tm_v2 (added in version 4.4.1). + No Support offered for any features or tools enabled by this flag. - Available since &scons; 4.2. + New in version 4.2 (experimental). - + , , @@ -1143,7 +1166,7 @@ Use file - as the initial SConscript file. + as the initial &SConscript; file. Multiple options may be specified, @@ -1154,14 +1177,14 @@ - + , Print a local help message for this project, -if one is defined in the SConscript files +if one is defined in the &SConscript; files (see the &f-link-Help; function), plus a line that refers to the standard &SCons; help message. If no local help message is defined, @@ -1171,7 +1194,7 @@ Exits after displaying the appropriate message. Note that use of this option requires &SCons; to process -the SConscript files, so syntax errors may cause +the &SConscript; files, so syntax errors may cause the help message not to be displayed. @@ -1193,7 +1216,7 @@ The default value is to use a chunk size of 64 kilobytes, which should be appropriate for most uses. -Available since &scons; 4.2. +New in version 4.2. @@ -1205,28 +1228,35 @@ Set the hashing algorithm used by SCons to ALGORITHM. This value determines the hashing algorithm used in generating -&contentsigs; or &CacheDir; keys. +&contentsigs;, &buildsigs; and &CacheDir; keys. -The supported list of values are: md5, sha1, and sha256. -However, the Python interpreter used to run SCons must have the corresponding +The supported list of values are: +md5, +sha1 +and sha256. +However, the &Python; interpreter used to run &scons; must have the corresponding support available in the hashlib module to use the specified algorithm. -Specifying this value changes the name of the SConsign database. -For example, will create a SConsign -database with name .sconsign_sha256.dblite. - -If this option is not specified, a the first supported hash format found -is selected. Typically this is MD5, however, if you are on a FIPS-compliant system -and using a version of Python less than 3.9, SHA1 or SHA256 will be chosen as the default. -Python 3.9 and onwards clients will always default to MD5, even in FIPS mode, unless -otherwise specified with the option. - -For MD5 databases (either explicitly specified with or -defaulted), the SConsign database is.sconsign.dblite. The newer SHA1 and -SHA256 selections meanwhile store their databases to .sconsign_algorithmname.dblite +If this option is omitted, +the first supported hash format found is selected. +Typically this is MD5, however, on a FIPS-compliant system +using a version of &Python; older than 3.9, +SHA1 or SHA256 is chosen as the default. 
+&Python; 3.9 and onwards clients always default to MD5, even in FIPS mode. + + +Specifying this option changes the name of the SConsign database. +The default database is .sconsign.dblite. +In the presence of this option, +ALGORITHM is +included in the name to indicate the difference, +even if the argument is md5. +For example, uses a SConsign +database named .sconsign_sha256.dblite. + -Available since &scons; 4.2. +New in version 4.2. @@ -1241,7 +1271,7 @@ - + , @@ -1252,7 +1282,7 @@ - + , @@ -1261,7 +1291,7 @@ Specifies a directory to search for -imported Python modules. If several +imported &Python; modules. If several options are used, the directories are searched in the order specified. @@ -1321,7 +1351,7 @@ - + @@ -1330,7 +1360,7 @@ to the installation paths such that all installed files will be placed under that directory. This option is unavailable if one of &b-link-Install;, &b-link-InstallAs; or -&b-link-InstallVersionedLib; is not used in the SConscript files. +&b-link-InstallVersionedLib; is not used in the &SConscript; files. @@ -1339,11 +1369,11 @@ Starts SCons in interactive mode. -The SConscript files are read once and a +The &SConscript; files are read once and a scons>>> prompt is printed. Targets may now be rebuilt by typing commands at interactive prompt -without having to re-read the SConscript files +without having to re-read the &SConscript; files and re-initialize the dependency graph from scratch. SCons interactive mode supports the following commands: @@ -1391,7 +1421,7 @@ but have no effect on the build command -(mainly because they affect how the SConscript files are read, +(mainly because they affect how the &SConscript; files are read, which only happens once at the beginning of interactive mode). @@ -1492,7 +1522,7 @@ - + , @@ -1512,7 +1542,7 @@ - + , @@ -1587,11 +1617,11 @@ . -Deprecated since &scons; 4.2. +Changed in version 4.2: deprecated. - + , , @@ -1600,24 +1630,29 @@ -Set no execute mode. +Set no-exec mode. Print the commands that would be executed to build -any out-of-date target files, but do not execute the commands. - -The output is a best effort, as &SCons; cannot always precisely -determine what would be built. For example, if a file is generated -by a builder action that is later used in the build, -that file is not available to scan for dependencies on an unbuilt tree, -or may contain out of date information in a built tree. +any out-of-date targets, but do not execute those commands. + -Work which is done directly in Python code in SConscript files, -as opposed to work done by builder actions during the build phase, -will still be carried out. If it is important to avoid some -such work from taking place in no execute mode, it should be protected. -An SConscript file can determine which mode -is active by querying &f-link-GetOption;, as in the call -if GetOption("no_exec"): +Only target building is suppressed - any work in the build +system that is done directly (in regular &Python; code) +will still be carried out. You can add guards around +code which should not be executed in no-exec mode by +checking the value of the option at run time with &f-link-GetOption;: + + +if not GetOption("no_exec"): + # run regular instructions + + +The output is a best effort, as &SCons; cannot always precisely +determine what would be built. 
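Similarly, read-time side effects can be guarded against the no-exec mode just described; a minimal sketch in which the stamp file and helper function are purely illustrative and not part of SCons:

import time

def write_build_stamp(path):
    # illustrative read-time side effect, skipped under 'scons -n'
    with open(path, "w") as stamp:
        stamp.write(time.ctime() + "\n")

if not GetOption("no_exec"):
    write_build_stamp("build_stamp.txt")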
For example, if a file generated +by a builder action is also used as a source in the build, +that file is not available to scan for dependencies at all +in an unbuilt tree, and may contain out of date information in a +previously built tree. @@ -1627,13 +1662,13 @@ Prevents the automatic addition of the standard site_scons -dirs to +directories to sys.path. Also prevents loading the site_scons/site_init.py modules if they exist, and prevents adding their site_scons/site_tools -dirs to the toolpath. +directories to the toolpath. @@ -1671,11 +1706,10 @@ -The type or types +The type of package to create when using the &b-link-Package; builder. -In the case of multiple types, type -should be a comma-separated string; &SCons; will try to build -for all of those packages. +Multiple types can be specified by using a comma-separated string, +in which case &SCons; will try to build for all of those package types. Note this option is only available if the &t-link-packaging; tool has been enabled. @@ -1685,15 +1719,14 @@ -Run SCons under the Python profiler -and save the results in the specified -file. -The results may be analyzed using the Python -pstats module. +Run &SCons; under the &Python; profiler +and save the results to file. +The results may be analyzed using the &Python; +pstats module. - + , @@ -1707,8 +1740,8 @@ -Quiets SCons status messages about -reading SConscript files, +Suppress status messages about +reading &SConscript; files, building targets and entering directories. Commands that are executed @@ -1731,7 +1764,7 @@ - + , , @@ -1744,7 +1777,7 @@ - + , , @@ -1868,7 +1901,7 @@ - + , @@ -1933,7 +1966,7 @@ it behaves as if all had been specified. -Available since &scons; 4.0. +New in version 4.0. @@ -1973,7 +2006,7 @@ - + , , @@ -1998,13 +2031,13 @@ option except for the way default targets are handled. When this option is used and no targets are specified on the command line, -all default targets that are defined in the SConscript(s) in the current +all default targets that are defined in the &SConscript; file(s) in the current directory are built, regardless of what directory the resultant targets end up in. - + , @@ -2175,7 +2208,7 @@ These warnings are disabled by default for the first phase of deprecation. Enable to be reminded about use of this tool module. -Available since &SCons; 4.3. +New in version 4.3. @@ -2183,7 +2216,7 @@ missing-sconscript -Warnings about missing SConscript files. +Warnings about missing &SConscript; files. These warnings are enabled by default. @@ -2195,16 +2228,16 @@ feature not working when &scons; -is run with the Python +is run with the &Python; -option or from optimized Python (.pyo) modules. +option or from optimized &Python; (.pyo) modules. no-parallel-support -Warnings about the version of Python +Warnings about the version of &Python; not being able to support parallel builds when the option is used. @@ -2216,7 +2249,7 @@ python-version Warnings about running -SCons with a deprecated version of Python. +&SCons; using a version of &Python; that has been deprecated. These warnings are enabled by default. @@ -2281,7 +2314,7 @@ - + , , @@ -2306,7 +2339,7 @@ SConscript Files The build configuration is described by one or more files, -known as SConscript files. +known as &SConscript; files. There must be at least one file for a valid build (&scons; will quit if it does not find one). 
&scons; by default looks for this file by the name @@ -2326,20 +2359,20 @@ -Each SConscript file in a build configuration is invoked +Each &SConscript; file in a build configuration is invoked independently in a separate context. This provides necessary isolation so that different parts of the build don't accidentally step on each other. You have to be explicit about sharing information, by using the &f-link-Export; function or the &exports; argument -to the &SConscript; function, as well as the &f-link-Return; function -in a called SConscript file, and comsume shared information by using the +to the &f-link-SConscript; function, as well as the &f-link-Return; function +in a called &SConscript; file, and comsume shared information by using the &f-link-Import; function. The following sections describe the various &SCons; facilities -that can be used in SConscript files. Quick links: +that can be used in &SConscript; files. Quick links: @@ -2538,7 +2571,7 @@ (more properly, tool specification modules) which are used to help initialize the &consenv;. An &SCons; tool is only responsible for setup. -For example, if an SConscript file declares +For example, if an &SConscript; file declares the need to construct an object file from a C-language source file by calling the &b-link-Object; builder, then a tool representing @@ -2726,7 +2759,7 @@ from the starting point, which is the top of the directory tree for an absolute path and the current directory for a relative path. The "current directory" in this context is the directory -of the SConscript file currently being processed. +of the &SConscript; file currently being processed. @@ -2860,7 +2893,7 @@ -Python only keeps one current directory +&Python; only keeps one current directory location even if there are multiple threads. This means that use of the chdir @@ -2871,7 +2904,7 @@ option, because individual worker threads spawned -by SCons interfere with each other +by &SCons; interfere with each other when they start changing directory. @@ -2965,7 +2998,7 @@ environment (indicated in the listing of builders below without a leading env.) may be called from custom &Python; modules that you -import into an SConscript file by adding the following +import into an &SConscript; file by adding the following to the &Python; module: @@ -3106,13 +3139,13 @@ When trying to handle errors that may occur in a builder method, consider that the corresponding Action is executed at a different -time than the SConscript file statement calling the builder. +time than the &SConscript; file statement calling the builder. It is not useful to wrap a builder call in a try block, since success in the builder call is not the same as the builder itself succeeding. If necessary, a Builder's Action should be coded to exit with -a useful exception message indicating the problem in the SConscript files - +a useful exception message indicating the problem in the &SConscript; files - programmatically recovering from build errors is rarely useful. @@ -3268,7 +3301,7 @@ &consvars;. Global functions may be called from custom Python modules that you -import into an SConscript file by adding the following import +import into an &SConscript; file by adding the following import to the Python module: @@ -3308,7 +3341,7 @@ In addition to the global functions and methods, &scons; supports a number of variables -that can be used in SConscript files +that can be used in &SConscript; files to affect how you want the build to be performed. 
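Where the passages above mention making builders and global functions available inside a separate Python helper module, a hedged sketch (the module name and the tweak it applies are illustrative, and the wildcard import assumes the conventional from SCons.Script import * form):

# site_helpers.py -- an ordinary Python module used by the build;
# assumed to be importable, e.g. placed in site_scons/
from SCons.Script import *   # brings Environment, builders and globals into scope

def warning_env(**kwargs):
    env = Environment(**kwargs)
    env.Append(CCFLAGS=["-Wall"])   # illustrative tweak
    return env

# SConscript
from site_helpers import warning_env
env = warning_env()
env.Program("hello", ["hello.c"])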
@@ -3481,7 +3514,7 @@ only after you've made all of your &Default;() calls, or else simply be careful of the order -of these statements in your SConscript files +of these statements in your &SConscript; files so that you don't look for a specific default target before it's actually been added to the list. @@ -3490,7 +3523,7 @@ These variables may be accessed from custom Python modules that you -import into an SConscript file by adding the following +import into an &SConscript; file by adding the following to the Python module: @@ -3632,7 +3665,7 @@ (this is different than &Autoconf;), but uses its normal dependency tracking to keep the checked values up to date. However, users may override this behaviour with the - + command line option. @@ -3646,14 +3679,16 @@ which is used when running the tests and which can be updated with the check results. Only one context may be active -at a time (since 4.0, &scons; will raise an exception -on an attempt to create a new context when there is -an active context), but a new context can be created +at a time, but a new context can be created after the active one is completed. For the global function form, the required env describes the initial values for the context's local &consenv;; for the &consenv; method form the instance provides the values. + +Changed in version 4.0: raises an exception +on an attempt to create a new context when there is an active context. + custom_tests specifies a dictionary containing custom tests (see the section on custom tests below). @@ -3670,9 +3705,9 @@ specifies a file which collects the output from commands that are executed to check for the existence of header files, libraries, etc. The default is #/config.log. -If you are using the -&VariantDir; function, -you may want to specify a subdirectory under your variant directory. +If you are using variant directories, +you may want to place the log file for a given build +under that build's variant directory. config_h specifies a C header file where the results of tests @@ -3760,13 +3795,16 @@ A &configure_context; has the following predefined methods which can be used to perform checks. Where -language is a required or -optional parameter, the choice can currently -be C or C++. The spellings accepted for +language is an optional parameter, +it specifies the compiler to use for the check, +currently a choice of C or C++. +The spellings accepted for C are C or c; for C++ the value can be CXX, cxx, C++ or c++. +If language is omitted, +C is assumed. @@ -3775,7 +3813,7 @@ Checks if header -is usable in the specified language. +is usable in the specified language. header may be a list, in which case the last item in the list @@ -3791,14 +3829,8 @@ a two character string, where the first character denotes the opening quote and the second character denotes the closing quote. By default, both characters are " (double quote). -The optional argument -language -should be either -C -or -C++ -and selects the compiler to be used for the check. -Returns a boolean indicating success or failure. + +Returns a boolean indicating success or failure. @@ -3859,89 +3891,115 @@ context.CheckFunc(function_name, [header, language]) -Checks if the specified -C or C++ library function is available based on the -context's local environment settings (that is, using -the values of &cv-link-CFLAGS;, &cv-link-CPPFLAGS;, &cv-link-LIBS; -or other relevant &consvars;). 
+Checks if function_name is usable +in the context's local environment, using the compiler +specified by language - that is, +can a check referencing it be compiled using the current values +of &cv-link-CFLAGS;, &cv-link-CPPFLAGS;, +&cv-link-LIBS; or other relevant &consvars;. -function_name -is the name of the function to check for. The optional header -argument is a string -that will be -placed at the top -of the test file -that will be compiled -to check if the function exists; -the default is: +argument is a string representing a code fragment +to place at the top of the test program +that will be compiled to check if the function exists. +If omitted, the default stanza will be +(with function_name appropriately substituted): + #ifdef __cplusplus extern "C" #endif -char function_name(); +char function_name(void); -Returns an empty string on success, a string containing -an error message on failure. +Note: if header is supplied, +it should not +include the standard header file that declares +function_name, +and it should include a +dummy prototype similar to the default case. +Compilers reject builds where a function call does +not match the declared prototype as happens +if the "real" header is included, +and modern compilers are now rejecting +implicit function declarations. + +Returns a boolean indicating success or failure. - context.CheckLib([library, symbol, header, language, autoadd=True]) + context.CheckLib([library, symbol, header, language, autoadd=True, append=True, unique=False]) Checks if library provides -symbol. -If -autoadd -is true (the default) and the library provides the specified -symbol, -appends the library to the LIBS &consvar; -library -may also be None (the default), -in which case -symbol -is checked with the current LIBS variable, -or a list of library names, -in which case each library in the list -will be checked for -symbol. -If -symbol -is not set or is -None, -then -CheckLib +symbol by compiling a simple stub program +with the compiler selected by language, +and optionally adds that library to the context. +If supplied, the text of header is included at the +top of the stub. +If autoadd is true (the default), +and the library provides the specified +symbol (as defined by successfully +linking the stub program), +it is added to the &cv-link-LIBS; &consvar; in the context. +if append is true (the default), +the library is appended, otherwise it is prepended. +If unique is true, +and the library would otherwise be added but is +already present in &cv-link-LIBS; in the configure context, +it will not be added again. The default is False. + + +library can be a list of library names, +or None (the default if the argument is omitted). +If the former, symbol is checked against +each library name in order, returning +(and reporting success) on the first +successful test; if the latter, +it is checked with the current value of &cv-LIBS; +(in this case no library name would be added). +If symbol +is omitted or None, +then CheckLib just checks if you can link against the specified -library. +library, Note though it is legal syntax, it would not be very useful to call this method with library and symbol both -omitted or None. -Returns a boolean indicating success or failure. +omitted or None - +at least one should be supplied. + +Returns a boolean indicating success or failure. + +Changed in version 4.5.0: added the +append and unique +parameters. 
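A minimal configure-context sketch tying together CheckFunc and CheckLib as described above (library and symbol names are illustrative; the append and unique keywords mentioned in the change note require 4.5.0 or later and are omitted here):

env = Environment()
conf = Configure(env)
if not conf.CheckFunc("sin"):
    # not usable with the current link line; try adding the math library
    if not conf.CheckLib("m", "sin", autoadd=True):
        print("sin() not available; falling back to a local implementation")
env = conf.Finish()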
+ - context.CheckLibWithHeader(library, header, language, [call, autoadd=True]) + context.CheckLibWithHeader(library, header, [language, call, autoadd=True, append=True, unique=False]) -Provides a more sophisticated way to check against libraries then the -CheckLib call. +Provides an alternative to the +CheckLib method +for checking for libraries usable in a build. library -specifies the library or a list of libraries to check. +specifies a library or list of libraries to check. header -specifies a header to check for. +specifies a header to include in the test program, +and language indicates the compiler to use. header may be a list, in which case the last item in the list @@ -3951,18 +4009,30 @@ #include lines should precede the header line being checked for. -call -can be any valid expression (with a trailing ';'). -If -call -is not set, -the default simply checks that you -can link against the specified +A code fragment +(must be a a valid expression, including a trailing semicolon) +to serve as the test can be supplied in +call; +if not supplied, +the default checks the ability to link against the specified library. -autoadd (default true) -specifies whether to add the library to the environment if the check -succeeds. -Returns a boolean indicating success or failure. +If autoadd is true (the default), +the first library that passes the check +is added to the &cv-link-LIBS; &consvar; in the context +and the method returns. +If append is true (the default), +the library is appended, otherwise prepended. +If unique is true, +and the library would otherwise be added but is +already present in &cv-link-LIBS; in the configure context, +it will not be added again. The default is False. + +Returns a boolean indicating success or failure. + +Changed in version 4.5.0: added the +append and unique +parameters. + @@ -3984,11 +4054,7 @@ sconf.CheckType('foo_type', '#include "my_types.h"', 'C++') - -Returns an empty string on success, a string containing -an error message on failure. - - +Returns a boolean indicating success or failure. @@ -4015,7 +4081,7 @@ type_name is actually that size. Returns the size in bytes, or zero if the type was not found -(or if the size did not match expect). +(or if the size did not match optional expect). For example, @@ -4046,6 +4112,7 @@ it can be used to detect if particular compiler flags will be accepted or rejected by the compiler. +Returns a boolean indicating success or failure. @@ -4066,6 +4133,7 @@ it can be used to detect if particular compiler flags will be accepted or rejected by the compiler. +Returns a boolean indicating success or failure. @@ -4088,6 +4156,7 @@ Note this does not check whether a shared library/dll can be created. +Returns a boolean indicating success or failure. @@ -4110,6 +4179,7 @@ Note this does not check whether a shared library/dll can be created. +Returns a boolean indicating success or failure. @@ -4136,7 +4206,8 @@ #include lines that will be inserted into the program that will be run to test for the existence of the symbol. -Returns a boolean indicating success or failure. + +Returns a boolean indicating success or failure. @@ -4418,7 +4489,8 @@ scons VARIABLE=foo -The variable values can also be specified in a configuration file or an SConscript file. +The variable values can also be specified in a configuration file +or an &SConscript; file. 
To obtain the object for manipulating values, call the &Variables; function: @@ -4730,7 +4802,7 @@ BoolVariable(key, help, default) -Returns a tuple of arguments +Return a tuple of arguments to set up a Boolean option. The option will use the specified name @@ -4739,7 +4811,7 @@ default, and help will form the descriptive part of the help text. -The option will interpret the values +The option will interpret the command-line values y, yes, t, @@ -4749,7 +4821,7 @@ and all as true, -and the values +and the command-line values n, no, f, @@ -4983,7 +5055,7 @@ BoolVariable( "warnings", help="compilation with -Wall and similar", - default=1, + default=True, ), EnumVariable( "debug", @@ -5054,7 +5126,7 @@ which you can then make use of. However, since filesystem Nodes have some useful public attributes and methods -that you can use in SConscript files, +that you can use in &SConscript; files, it is sometimes appropriate to create them manually, outside the regular context of a Builder call. @@ -6386,7 +6458,7 @@ First, if you need to perform the action -at the time the SConscript +at the time the &SConscript; file is being read, you can use the &f-link-Execute; @@ -6431,21 +6503,26 @@ dest file or directory to the specified mode -which can be octal or string, similar to the bash command. +which can be octal or string, similar to the POSIX +chmod command. Examples: Execute(Chmod('file', 0o755)) -env.Command('foo.out', 'foo.in', - [Copy('$TARGET', '$SOURCE'), - Chmod('$TARGET', 0o755)]) +env.Command( + 'foo.out', + 'foo.in', + [Copy('$TARGET', '$SOURCE'), Chmod('$TARGET', 0o755)], +) Execute(Chmod('file', "ugo+w")) -env.Command('foo.out', 'foo.in', - [Copy('$TARGET', '$SOURCE'), - Chmod('$TARGET', "ugo+w")]) +env.Command( + 'foo.out', + 'foo.in', + [Copy('$TARGET', '$SOURCE'), Chmod('$TARGET', "ugo+w")], +) @@ -6466,6 +6543,9 @@ source file or directory to the dest destination file or directory. +If src is a list, +dest must be a directory +if it already exists. Examples: @@ -6695,7 +6775,7 @@ echo Last build occurred $TODAY. > $TARGET -but the build signature added to any target files would be computed from: +but the &buildsig; added to any target files would be computed from: echo Last build occurred . > $TARGET @@ -6716,8 +6796,8 @@ env is the &consenv; to use for context, and for_signature is a boolean value that tells the callable -if it is being called for the purpose of generating a build signature. -Since the build signature is used for rebuild determination, +if it is being called for the purpose of generating a &buildsig;. +Since the &buildsig; is used for rebuild determination, variable elements that do not affect whether a rebuild should be triggered should be omitted from the returned string @@ -6747,8 +6827,8 @@ will be exactly as it was set: "$FOO baz". This can make debugging tricky, as the substituted result is not available at the time -the SConscript files are being interpreted and -thus not available to print(). +the &SConscript; files are being interpreted and +thus not available to the print function. However, you can perform the substitution on demand by calling the &f-link-env-subst; method for this purpose. @@ -7025,9 +7105,9 @@ according to the current value of env['COND'] when the command is executed. The evaluation takes place when the target is being -built, not when the SConscript is being read. So if +built, not when the &SConscript; is being read. So if env['COND'] is changed -later in the SConscript, the final value will be used. 
+later in the &SConscript;, the final value will be used. Here's a more complete example. Note that all of COND, @@ -7062,7 +7142,7 @@ returns the value. - + Use of the Python eval function is considered to have security implications, since, depending on input sources, @@ -7070,7 +7150,7 @@ Although &SCons; makes use of it in a somewhat restricted context, you should be aware of this issue when using the ${python-expression-for-subst} form. - + @@ -7213,7 +7293,7 @@ A Python function that takes four or five arguments: a &consenv;, a Node for the directory containing -the SConscript file in which +the &SConscript; file in which the first target was defined, a list of target nodes, a list of source nodes, @@ -7697,7 +7777,7 @@ "Mixing and matching" in this way can be made to work, but it requires careful attention to the use of path names -in your SConscript files. +in your &SConscript; files. In practice, users can sidestep the issue by adopting the following guidelines: @@ -8076,7 +8156,7 @@ Creating a Hierarchical Build Notice that the file names specified in a subdirectory's -SConscript file are relative to that subdirectory. +&SConscript; file are relative to that subdirectory. SConstruct: @@ -8112,7 +8192,7 @@ You must explicitly call &f-link-Export; and &f-link-Import; for variables that -you want to share between SConscript files. +you want to share between &SConscript; files. SConstruct: @@ -8163,7 +8243,7 @@ Note the use of the &f-link-Export; method to set the cppdefines variable to a different -value each time we call the &SConscriptFunc; function. +value each time we call the &f-link-SConscript; function. @@ -8346,7 +8426,7 @@ In general, &scons; is not controlled by environment variables set in the shell used to invoke it, leaving it -up to the SConscript file author to import those if desired. +up to the &SConscript; file author to import those if desired. However the following variables are imported by &scons; itself if set: @@ -8383,29 +8463,29 @@ (Windows only). If set, save the shell environment variables generated when setting up the Microsoft Visual C++ compiler -(and/or Build Tools) to a cache file, to give these settings, -which are relatively expensive to generate, persistence -across &scons; invocations. -Use of this option is primarily intended to aid performance -in tightly controlled Continuous Integration setups. +(and/or Build Tools) to a cache file, to give these settings +persistence across &scons; invocations. +Generating this information is relatively expensive, +so using this option may aid performance where &scons; is run often, +such as Continuous Integration setups. If set to a True-like value ("1", "true" or "True") will cache to a file named -.scons_msvc_cache.json in the user's home directory. +scons_msvc_cache.json in the user's home directory. If set to a pathname, will use that pathname for the cache. -Note: use this cache with caution as it -might be somewhat fragile: while each major toolset version -(e.g. Visual Studio 2017 vs 2019) and architecture pair will get separate -cache entries, if toolset updates cause a change -to settings within a given release series, &scons; will not -detect the change and will reuse old settings. -Remove the cache file in case of problems with this. -&scons; will ignore failures reading or writing the file -and will silently revert to non-cached behavior in such cases. +Note: this implementation may still be somewhat fragile. 
+In case of problems, remove the cache file - recreating with +fresh info normally resolves any issues. +&SCons; ignores failures reading or writing the cache file +and will silently revert to non-cached behavior in such cases. + -Available since &scons; 3.1 (experimental). +New in 3.1 (experimental). +The default cache file name was changed to +its present value in 4.4, and contents were expanded. + diff -Nru scons-4.4.0+dfsg/doc/reference/chtml.xsl scons-4.5.2+dfsg/doc/reference/chtml.xsl --- scons-4.4.0+dfsg/doc/reference/chtml.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/reference/chtml.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,57 +1,57 @@ - - - - - - - - - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - + + + + + + + + + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + diff -Nru scons-4.4.0+dfsg/doc/reference/html.xsl scons-4.5.2+dfsg/doc/reference/html.xsl --- scons-4.4.0+dfsg/doc/reference/html.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/reference/html.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,151 +1,151 @@ - - - - - - - - - - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - - - - - - - - - - - - - - - - - - - <xsl:copy-of select="$title"/> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + + + + + + + + + + + + + + + + + + + <xsl:copy-of select="$title"/> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/doc/reference/pdf.xsl scons-4.5.2+dfsg/doc/reference/pdf.xsl --- scons-4.4.0+dfsg/doc/reference/pdf.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/reference/pdf.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,73 +1,73 @@ - - - - - - - - - - - - -0pt - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - bold - - - - - - - - - - - + + + + + + + + + + + + +0pt + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + bold + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/doc/scons.mod scons-4.5.2+dfsg/doc/scons.mod --- 
scons-4.4.0+dfsg/doc/scons.mod 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/scons.mod 2023-03-21 16:17:04.000000000 +0000 @@ -453,6 +453,7 @@ content signature"> content signatures"> build signature"> +build signatures"> true"> false"> diff -Nru scons-4.4.0+dfsg/doc/sphinx/conf.py scons-4.5.2+dfsg/doc/sphinx/conf.py --- scons-4.4.0+dfsg/doc/sphinx/conf.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/sphinx/conf.py 2023-03-21 16:17:04.000000000 +0000 @@ -38,7 +38,7 @@ 'sphinx.ext.napoleon', 'sphinx.ext.todo', 'sphinx.ext.viewcode', - 'sphinx_rtd_theme', + #'sphinx_rtd_theme', 'rst2pdf.pdfbuilder', ] @@ -46,15 +46,30 @@ autodoc_default_options = { "members": True, - #"special-members": True, + "special-members": False, "private-members": True, "inherited-members": True, "undoc-members": True, "exclude-members": '__weakref__', } autodoc_exclude_members = ['*Tests'] -napoleon_include_special_with_doc = False + +# Napoleon settings. Nearly all defaults, listed explicitly to be safe. +# See: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html#configuration +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_init_with_doc = False napoleon_include_private_with_doc = True +napoleon_include_special_with_doc = True # not default +napoleon_use_admonition_for_examples = True # not default +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = True # not default +napoleon_use_param = True +napoleon_use_rtype = True +napoleon_preprocess_types = False +napoleon_type_aliases = None +napoleon_attr_annotations = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -73,7 +88,7 @@ # General information about the project. project = 'SCons' -copyright = '2021, SCons Project' +copyright = '2022, SCons Project' author = 'SCons Project Team' # The version info for the project you're documenting, acts as replacement for @@ -111,7 +126,8 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +#html_theme = "sphinx_rtd_theme" +html_theme = "sphinx_book_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -212,4 +228,4 @@ # -- for PDF # Grouping the document tree into PDF files. List of tuples # (source start file, target name, title, author, options). -pdf_documents = [('index', u'scons-api', u'SCons API Docs', u'SCons Project'),] +pdf_documents = [('index', 'scons-api', 'SCons API Docs', 'SCons Project'),] diff -Nru scons-4.4.0+dfsg/doc/sphinx/index.rst scons-4.5.2+dfsg/doc/sphinx/index.rst --- scons-4.4.0+dfsg/doc/sphinx/index.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/sphinx/index.rst 2023-03-21 16:17:04.000000000 +0000 @@ -1,19 +1,25 @@ -.. SCons documentation master file, created by +.. SCons documentation master file, originally created by sphinx-quickstart on Mon Apr 30 09:36:53 2018. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -SCons Project API Documentation -=============================== +SCons API Documentation +======================= -This is the internal API Documentation for SCons. -The Documentation is generated using the Sphinx tool. 
-The target audience is developers working on SCons itself, -so it does not clearly delineate what is "Public API" - -interfaces for use in your SCons configuration scripts -which have a consistency guarantee, and what is internal, -so always keep the SCons manual page around for helping -with such determinations. +.. Attention:: + This is the **internal** API Documentation for SCons. + The documentation is automatically generated for each release + from the source code using the + `Sphinx `_ tool. + Missing information is due to shortcomings in the docstrings in the code, + which are by no means complete (contributions welcomed!). + + The target audience is developers working on SCons itself: + what is "Public API" is not clearly deliniated here. + The interfaces available for use in SCons configuration scripts, + which have a consistency guarantee, are those documented in the + `SCons Reference Manual + `_. .. toctree:: :maxdepth: 2 @@ -25,6 +31,7 @@ SCons.Platform SCons.Scanner SCons.Script + SCons.Taskmaster SCons.Tool SCons.Variables diff -Nru scons-4.4.0+dfsg/doc/sphinx/SCons.rst scons-4.5.2+dfsg/doc/sphinx/SCons.rst --- scons-4.4.0+dfsg/doc/sphinx/SCons.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/sphinx/SCons.rst 2023-03-21 16:17:04.000000000 +0000 @@ -18,6 +18,7 @@ SCons.Platform SCons.Scanner SCons.Script + SCons.Taskmaster SCons.Tool SCons.Variables SCons.compat @@ -83,11 +84,13 @@ SCons.Errors module ------------------- +.. Turn off inherited members to quiet fluff from the Python base Exception .. automodule:: SCons.Errors :members: :undoc-members: :show-inheritance: + :no-inherited-members: SCons.Executor module --------------------- @@ -97,14 +100,6 @@ :undoc-members: :show-inheritance: -SCons.Job module ----------------- - -.. automodule:: SCons.Job - :members: - :undoc-members: - :show-inheritance: - SCons.Memoize module -------------------- @@ -144,14 +139,6 @@ :members: :undoc-members: :show-inheritance: - -SCons.Taskmaster module ------------------------ - -.. automodule:: SCons.Taskmaster - :members: - :undoc-members: - :show-inheritance: SCons.Util module ----------------- diff -Nru scons-4.4.0+dfsg/doc/sphinx/SCons.Scanner.rst scons-4.5.2+dfsg/doc/sphinx/SCons.Scanner.rst --- scons-4.4.0+dfsg/doc/sphinx/SCons.Scanner.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/sphinx/SCons.Scanner.rst 2023-03-21 16:17:04.000000000 +0000 @@ -44,6 +44,14 @@ :undoc-members: :show-inheritance: +SCons.Scanner.Java module +------------------------- + +.. automodule:: SCons.Scanner.Java + :members: + :undoc-members: + :show-inheritance: + SCons.Scanner.LaTeX module -------------------------- diff -Nru scons-4.4.0+dfsg/doc/sphinx/SCons.Taskmaster.rst scons-4.5.2+dfsg/doc/sphinx/SCons.Taskmaster.rst --- scons-4.4.0+dfsg/doc/sphinx/SCons.Taskmaster.rst 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/doc/sphinx/SCons.Taskmaster.rst 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,21 @@ +SCons.Taskmaster package +======================== + +Submodules +---------- + +SCons.Taskmaster.Job module +--------------------------- + +.. automodule:: SCons.Taskmaster.Job + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: SCons.Taskmaster + :members: + :undoc-members: + :show-inheritance: diff -Nru scons-4.4.0+dfsg/doc/user/build-install.xml scons-4.5.2+dfsg/doc/user/build-install.xml --- scons-4.4.0+dfsg/doc/user/build-install.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/build-install.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,10 @@ + + @@ -22,46 +28,19 @@ xmlns="http://www.scons.org/dbxsd/v1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.scons.org/dbxsd/v1.0 http://www.scons.org/dbxsd/v1.0/scons.xsd"> -Building and Installing &SCons; - - +Building and Installing &SCons; This chapter will take you through the basic steps - of installing &SCons; on your system, - and building &SCons; if you don't have a - pre-built package available - (or simply prefer the flexibility of building it yourself). + of installing &SCons; so you can use it for your your projects. Before that, however, this chapter will also describe the basic steps - involved in installing Python on your system, + involved in installing &Python; on your system, in case that is necessary. - Fortunately, both &SCons; and Python - are very easy to install on almost any system, - and Python already comes installed on many systems. + Fortunately, both &SCons; and &Python; + are easy to install on almost any system, + and &Python; already comes installed on many systems. @@ -70,31 +49,31 @@ Lastly, this chapter also contains a section that - provides a brief overview of the Python programming language, + provides a brief overview of the &Python; programming language, which is the language used to implement &SCons;, and which forms the basis of the &SCons; configuration files. - Becoming familiar with some Python concepts will make it easier + Becoming familiar with some &Python; concepts will make it easier to understand many of the examples in this User's Guide. Nevertheless, it is possible - to configure simple &SCons; builds without knowing Python, + to configure simple &SCons; builds without knowing &Python;, so you can skip this section if you want to dive in and pick up things by example- -or, of course, if you are - already familiar with Python. + already familiar with &Python;. --> -
+
Installing Python - Because &SCons; is written in Python, - you need to have Python installed on your system + Because &SCons; is written in the &Python; programming language, + you need to have a &Python; interpreter available on your system to use &SCons;. - Before you try to install Python, - you should check to see if Python is already + Before you try to install &Python;, + check to see if &Python; is already available on your system by typing python -V (capital 'V') @@ -106,28 +85,34 @@ $ python -V -Python 3.7.1 +Python 3.9.15 - Note to Windows users: there are a number of different ways Python + If you get a version like 2.7.x, you may need to try using the + name python3 - current &SCons; no longer + works with &Python; 2. + + + + Note to Windows users: there are a number of different ways &Python; can be installed or invoked on Windows, it is beyond the scope - of this guide to unravel all of them. Many will have an additional + of this guide to unravel all of them. Some have an additional program called the Python launcher (described, somewhat technically, in PEP 397): try using the command name py instead of python, if that is not available drop - back to trying python. + back to trying python C:\>py -V -Python 3.7.1 +Python 3.9.15 - If Python is not installed on your system, + If &Python; is not installed on your system, or is not findable in the current search path, you will see an error message stating something like "command not found" @@ -135,14 +120,14 @@ or "'python' is not recognized as an internal or external command, operable progam or batch file" (on Windows cmd). - In that case, you need to either install Python + In that case, you need to either install &Python; or fix the search path before you can install &SCons;. - The canonical location for downloading Python - from Python's own website is: + The link for downloading &Python; installers (Windows and Mac) + from the project's own website is: https://www.python.org/download. There are useful system-specific entries on setup and usage to be found at: @@ -150,34 +135,56 @@ - For Linux systems, Python is - almost certainly available as a supported package, possibly + For Linux systems, &Python; is + almost certainly available as a supported package, probably installed by default; this is often preferred over installing - by other means, and is easier than installing from source code. + by other means as the system package will be built with + carefully chosen optimizations, and will be kept up to date + with bug fixes and security patches. In fact, the &Python; + project itself does not build installers for Linux for this reason. Many such systems have separate packages for - Python 2 and Python 3 - make sure the Python 3 package is + &Python; 2 and &Python; 3 - make sure the &Python; 3 package is installed, as the latest &SCons; requires it. Building from source may still be a - useful option if you need a version that is not offered by + useful option if you need a specific version that is not offered by the distribution you are using. - &SCons; will work with Python 3.5 or later. - If you need to install Python and have a choice, - we recommend using the most recent Python version available. - Newer Pythons have significant improvements + Recent versions of the Mac no longer come with &Python; + pre-installed; older versions came with a rather out of date + version (based on &Python; 2.7) which is insufficient to run + current &SCons;. 
+ The python.org installer can be used on the Mac, but there are + also other sources such as MacPorts and Homebrew. + The Anaconda installation also comes with a bundled &Python;. + + + + Windows has even more choices. The Python.org installer is + a traditional .exe style; + the same software is also released as a Windows application through + the Microsoft Store. Several alternative builds also exist + such as Chocolatey and ActiveState, and, again, + a version of Python comes with Anaconda. + + + + &SCons; will work with &Python; 3.6 or later. + If you need to install &Python; and have a choice, + we recommend using the most recent &Python; version available. + Newer &Python; versions have significant improvements that help speed up the performance of &SCons;.
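A quick way to confirm the version requirement just stated (&Python; 3.6 or later for current &SCons;) is to run a small check with the interpreter you intend to use. This is only an illustrative snippet, not something shipped with &SCons;:

import sys

# Current SCons needs Python 3.6+; report what this interpreter provides.
# (No f-strings are used, so the check itself also runs on older interpreters.)
version = ".".join(str(part) for part in sys.version_info[:3])
if sys.version_info < (3, 6):
    sys.exit("Python %s is too old for current SCons (3.6 or later is required)" % version)
print("Python %s is new enough for current SCons" % version)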
-
+
Installing &SCons; - The canonical way to install &SCons; is from the Python Package - Index (PyPi): + The recommended way to install &SCons; is from the &Python; Package + Index (PyPI): @@ -185,9 +192,9 @@ - If you prefer not to install to the Python system location, - or do not have privileges to do so, you can add a flag to - install to a location specific to your own account: + If you prefer not to install to the &Python; system location, + or do not have privileges to do so, you can add a flag to install + to a location specific to your own account and &Python; version: @@ -197,7 +204,7 @@ For those users using Anaconda or Miniconda, use the conda installer instead, so the &scons; - install location will match the version of Python that + install location will match the version of &Python; that system will be using. For example: @@ -206,43 +213,61 @@ - &SCons; comes pre-packaged for installation on many Linux systems. - Check your package installation system - to see if there is an &SCons; package available. - Many people prefer to install distribution-native packages if available, - as they provide a central point for management and updating. - During the still-ongoing Python 2 to 3 transition, - some distributions may still have two &SCons; packages available, - one which uses Python 2 and one which uses Python 3. Since - the latest &scons; only runs on Python 3, to get the current version - you should choose the Python 3 package. + If you need a specific + version of &SCons; that is different from the current version, + pip has a version option + (e.g. python -m pip install scons==3.1.2), + or you can follow the instructions in the following sections. - If you need a specific - version of &SCons; that is different from the package available, - pip has a version option or you can follow - the instructions in the next section. + &SCons; does comes pre-packaged for installation on many Linux systems. + Check your package installation system + to see if there is an up-to-date &SCons; package available. + Many people prefer to install distribution-native packages if available, + as they provide a central point for management and updating; + however not all distributions update in a timely fashion. + During the still-ongoing &Python; 2 to 3 transition, + some distributions may still have two &SCons; packages available, + one which uses &Python; 2 and one which uses &Python; 3. Since + the latest &scons; only runs on &Python; 3, to get the current version + you should choose the &Python; 3 package.
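The pip-based installs described above can also be scripted, which is sometimes handy in provisioning setups. The following is only a sketch: it drives pip through the same interpreter that will later run &scons;, and the commented lines show pip's standard --user option (a per-user install) and the ==version pinning form mentioned in the text:

import subprocess
import sys

def pip_install(*args):
    # run pip via the interpreter that will also be used to run SCons
    subprocess.run([sys.executable, "-m", "pip", "install", *args], check=True)

pip_install("scons")              # latest release from PyPI
# pip_install("--user", "scons")  # per-user location instead of the system one
# pip_install("scons==3.1.2")     # pin a specific release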
-
- Building and Installing &SCons; on Any System +
+ Using &SCons; Without Installing - If a pre-built &SCons; package is not available for your system, - and installing using pip is not suitable, - then you can still easily build and install &SCons; using the native - Python setuptools package. + You don't actually need to "install" &SCons; to use it. + Nor do you need to "build" it, unless you are interested in + producing the &SCons; documentation, which does use several + tools to produce HTML, PDF and other output formats from + files in the source tree. + All you need to do is + call the scons.py driver script in a + location that contains an &SCons; tree, and it will figure out + the rest. You can test that like this: + +$ python /path/to/unpacked/scripts/scons.py --version + + - The first step is to download either the + To make use of an uninstalled &SCons;, + the first step is to download either the scons-&buildversion;.tar.gz or scons-&buildversion;.zip, which are available from the SCons download page at https://scons.org/pages/download.html. + There is also a scons-local bundle you can make + use of. It is arranged a little bit differently, with the idea + that you can include it with your own project if you want people + to be able to do builds without having to download or install &SCons;. + Finally, you can also use a checkout of the git tree from GitHub + at a location to point to. @@ -252,195 +277,88 @@ or WinZip on Windows. This will create a directory called scons-&buildversion;, - usually in your local directory. - Then change your working directory to that directory - and install &SCons; by executing the following commands: + usually in your local directory. The driver script + will be in a subdirectory named scripts, + unless you are using scons-local, + in which case it will be in the top directory. + Now you only need to call scons.py by + giving a full or relative path to it in order to use that + &SCons; version. - -# cd scons-&buildversion; -# python setup.py install - - - - This will build &SCons;, - install the &scons; script - in the python which is used to run the setup.py's scripts directory - (/usr/local/bin or - C:\Python37\Scripts), - and will install the &SCons; build engine - in the corresponding library directory for the python used - (/usr/local/lib/scons or - C:\Python37\scons). - Because these are system directories, - you may need root (on Linux or UNIX) or Administrator (on Windows) - privileges to install &SCons; like this. - + Note that instructions for older versions may have suggested + running python setup.py install to + "build and install" &SCons;. This is no longer recommended + (in fact, it is not recommended by the wider &Python; packaging + community for any end-user installations + of &Python; software). There is a setup.py file, + but it is only tested and used for the automated procedure which + prepares an &SCons; bundle for making a release on PyPI, + and even that is not guaranteed to work in future. - - -
- Building and Installing Multiple Versions of &SCons; Side-by-Side - - - - The &SCons; setup.py script - has some extensions that support - easy installation of multiple versions of &SCons; - in side-by-side locations. - This makes it easier to download and - experiment with different versions of &SCons; - before moving your official build process to a new version, - for example. +
- +
+ Running Multiple Versions of &SCons; Side-by-Side - - To install &SCons; in a version-specific location, - add the option - when you call setup.py: - + In some cases you may need several versions of &SCons; + present on a system at the same time - perhaps you have + an older project to build that has not yet been "ported" + to a newer &SCons; version, or maybe you want to test a + new &SCons; release side-by-side with a previous one + before switching over. + The use of an "uninstalled" package as described in the + previous section can be of use for this purpose. - -# python setup.py install --version-lib - - - - This will install the &SCons; build engine - in the - /usr/lib/scons-&buildversion; - or - C:\Python27\scons-&buildversion; - directory, for example. - + Another approach to multiple versions is to create + &Python; virtualenvs, and install different &SCons; versions in each. + A Python virtual environment + is a directory with an isolated set of Python packages, + where packages you install/upgrade/remove inside the + environment do not affect anything outside it, + and those you install/upgrade/remove outside of it + do not affect anything inside it. + In other words, anything you do with pip + in the environment stays in that environment. + The &Python; standard library provides a module called + venv for creating these + (), + although there are also other tools which provide more precise + control of the setup. - - If you use the option - the first time you install &SCons;, - you do not need to specify it each time you install - a new version. - The &SCons; setup.py script - will detect the version-specific directory name(s) - and assume you want to install all versions - in version-specific directories. - You can override that assumption in the future - by explicitly specifying the option. - + Using a virtualenv can be useful even for a single version of + &SCons;, to gain the advantages of having an isolated environment. + It also gets around the problem of not having administrative + privileges on a particular system to install a distribution + package or use pip to install to a + system location, as the virtualenv is completely under your control. -
- -
- Installing &SCons; in Other Locations - - - You can install &SCons; in locations other than - the default by specifying the option: - + The following outline shows how this could be set up + on a Linux/POSIX system (the syntax will be a bit different + on Windows): -# python setup.py install --prefix=/opt/scons +$ create virtualenv named scons3 +$ create virtualenv named scons4 +$ source scons3/bin/activate +$ pip install scons==3.1.2 +$ deactivate +$ source scons4/bin/activate +$ pip install scons +$ deactivate +$ activate a virtualenv and run 'scons' to use that version - - - This would - install the scons script in - /opt/scons/bin - and the build engine in - /opt/scons/lib/scons, - - - - - - Note that you can specify both the - and the options - at the same type, - in which case setup.py - will install the build engine - in a version-specific directory - relative to the specified prefix. - Adding to the - above example would install the build engine in - /opt/scons/lib/scons-&buildversion;. - - - -
- -
- Building and Installing &SCons; Without Administrative Privileges - - - - If you don't have the right privileges to install &SCons; - in a system location, - simply use the --prefix= option - to install it in a location of your choosing. - For example, - to install &SCons; in appropriate locations - relative to the user's $HOME directory, - the &scons; script in - $HOME/bin - and the build engine in - $HOME/lib/scons, - simply type: - - - - -$ python setup.py install --prefix=$HOME - - - - - You may, of course, specify any other location you prefer, - and may use the option - if you would like to install version-specific directories - relative to the specified prefix. - - - - - - This can also be used to experiment with a newer - version of &SCons; than the one installed - in your system locations. - Of course, the location in which you install the - newer version of the &scons; script - ($HOME/bin in the above example) - must be configured in your &PATH; variable - before the directory containing - the system-installed version - of the &scons; script. - - - -
-
+ %scons; @@ -18,32 +24,8 @@ xmlns="http://www.scons.org/dbxsd/v1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.scons.org/dbxsd/v1.0 http://www.scons.org/dbxsd/v1.0/scons.xsd"> -Hierarchical Builds - +Hierarchical Builds @@ -206,7 +187,7 @@ Will not only tell you reliably that the .class files - in the classes subdirectory + in the classes subdirectory are up-to-date: @@ -250,7 +231,7 @@ variable to specify the version in use. With Java 1.6, the one-liner example can then be defined like this: - + Java('classes', 'src', JAVAVERSION='1.6') @@ -280,8 +261,8 @@ -Java(target = 'classes', source = 'src') -Jar(target = 'test.jar', source = 'classes') +Java(target='classes', source='src') +Jar(target='test.jar', source='classes') public class Example1 @@ -344,10 +325,10 @@ -prog1_class_files = Java(target = 'classes', source = 'prog1') -prog2_class_files = Java(target = 'classes', source = 'prog2') -Jar(target = 'prog1.jar', source = prog1_class_files) -Jar(target = 'prog2.jar', source = prog2_class_files) +prog1_class_files = Java(target='classes', source='prog1') +prog2_class_files = Java(target='classes', source='prog2') +Jar(target='prog1.jar', source=prog1_class_files) +Jar(target='prog2.jar', source=prog2_class_files) public class Example1 @@ -418,8 +399,8 @@ -classes = Java(target = 'classes', source = 'src/pkg/sub') -JavaH(target = 'native', source = classes) +classes = Java(target='classes', source='src/pkg/sub') +JavaH(target='native', source=classes) package pkg.sub; @@ -642,8 +623,8 @@ -classes = Java(target = 'classes', source = 'src/pkg/sub') -RMIC(target = 'outdir', source = classes) +classes = Java(target='classes', source='src/pkg/sub') +RMIC(target='outdir', source=classes) package pkg.sub; diff -Nru scons-4.4.0+dfsg/doc/user/less-simple.xml scons-4.5.2+dfsg/doc/user/less-simple.xml --- scons-4.4.0+dfsg/doc/user/less-simple.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/less-simple.xml 2023-03-21 16:17:04.000000000 +0000 @@ -236,9 +236,10 @@ - You can also use the &Glob; function to find all files matching a + You can also use the &f-link-Glob; function to find all files matching a certain template, using the standard shell pattern matching - characters *, ? + characters * (to match everything), + ? (to match a single character) and [abc] to match any of a, b or c. [!abc] is also supported, @@ -254,13 +255,14 @@ - The SCons man page has more details on using &Glob; - with variant directories - (see , below) + &f-Glob; has powerful capabilities - it matches even if the + file does not exist, but &SCons; can determine that it would + exist after a build. + You will meet it again reading about + variant directories + (see ) and repositories - (see , below), - excluding some files - and returning strings rather than Nodes. + (see ). 
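To make the &Glob; pattern description above concrete, a build line in an SConscript might look like the following; this is a hypothetical fragment and the file and program names are invented:

# gather every C source in this directory, e.g. main.c, util_1.c, version.c
sources = Glob('*.c')

# the other patterns described above work the same way, for example:
#   Glob('util_?.c')    any single character in that position
#   Glob('[abc]*.c')    sources whose names start with a, b or c
#   Glob('[!abc]*.c')   sources whose names do not start with a, b or c
Program('example', sources)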
diff -Nru scons-4.4.0+dfsg/doc/user/main.xml scons-4.5.2+dfsg/doc/user/main.xml --- scons-4.4.0+dfsg/doc/user/main.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/main.xml 2023-03-21 16:17:04.000000000 +0000 @@ -54,10 +54,10 @@ The SCons Development Team - Released: Mon, 21 Nov 2021 17:07:47 -0700 + Released: Mon, 06 Mar 2023 23:58:40 -0400 - 2004 - 2021 + 2004 - 2023 The SCons Foundation diff -Nru scons-4.4.0+dfsg/doc/user/misc.xml scons-4.5.2+dfsg/doc/user/misc.xml --- scons-4.4.0+dfsg/doc/user/misc.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/misc.xml 2023-03-21 16:17:04.000000000 +0000 @@ -256,11 +256,11 @@ - The &FindFile; function searches for a file in a list of directories. + The &f-link-FindFile; function searches for a file in a list of directories. If there is only one directory, it can be given as a simple string. The function returns a File node if a matching file exists, or None if no file is found. - (See the documentation for the &Glob; function for an alternative way + (See the documentation for the &f-link-Glob; function for an alternative way of searching for entries in a directory.) diff -Nru scons-4.4.0+dfsg/doc/user/repositories.xml scons-4.5.2+dfsg/doc/user/repositories.xml --- scons-4.4.0+dfsg/doc/user/repositories.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/repositories.xml 2023-03-21 16:17:04.000000000 +0000 @@ -203,6 +203,10 @@ + The &f-link-Glob; function understands about repositories, + and will use the same matching algorithm as described for + explicitly-listed sources. +
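As a small illustration of the &FindFile; behavior described in the misc.xml hunk above (a File node when a match exists, None otherwise); the directory and file names here are invented:

# look for a header in a few candidate directories
config_h = FindFile('config.h', ['.', 'include', 'src/include'])
if config_h is None:
    print("config.h not found in any of the search directories")
else:
    # config_h is a File node; str() gives the path where it was found
    print("using configuration header:", str(config_h))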
diff -Nru scons-4.4.0+dfsg/doc/user/separate.xml scons-4.5.2+dfsg/doc/user/separate.xml --- scons-4.4.0+dfsg/doc/user/separate.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/doc/user/separate.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,10 @@ + + %scons; @@ -17,98 +23,111 @@ xmlns="http://www.scons.org/dbxsd/v1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.scons.org/dbxsd/v1.0 http://www.scons.org/dbxsd/v1.0/scons.xsd"> -Separating Source and Build Trees: Variant Directories - + - It's often useful to keep any built files completely - separate from the source files. Consider if you have a - project to build software for a variety of different - controller hardware. The boards are able to share a - lot of code, so it makes sense to keep them in the same - source tree, but certain build options in the source code - and header files differ. If you build "Controller A" first, - then "Controller B", on the "Controller B" build everything would - have to be rebuilt, because &SCons; recognizes that the build - instructions are different from those used in the "Controller A" - build for each target - the build instructions are part of - &SCons;'s out-of-date calculation. - Now when you go back and build for "Controller A", - things have to be rebuilt from scratch again for the same reason. + Consider if you have a project to build an embedded + software system for a variety of different controller hardware. + The system is able to share a lot of code, + so it makes sense to use a common source tree, + but certain build options in the source code + and header files differ. For a regular in-place build, + the build outputs go in the same place as the source code. + If you build Controller A first, + followed by Controller B, + on the Controller B build everything that + uses different build options has to be rebuilt since those + objects will be different + (the build lines, including preprocessor defines, are part of + &SCons;'s out-of-date calculation for this reason). + If you go back and build for Controller A again, + things have to be rebuilt again for the same reason. However, if you can separate the locations of the output files, + so each controller has its own location for build outputs, this problem can be avoided. - You can even set up to do both builds in one invocation of &SCons;. - You can enable this separation by establishing one or more - variant directory trees - that are used to perform the build in, and thus provide a unique - home for object files, libraries, and executable programs, etc. - for a specific flavor, or variant, of build. &SCons; tracks - targets by their path, so when the variant directory is included, - objects belonging to "Controller A" can have different - build instructions than those belonging to "Controller B" without - triggering ping-ponging rebuilds. + Having a separated build tree also helps you keep your source tree clean - + there is less chance of accidentally checking in build products + to version control that were not intended to be checked in. + You can add a separated build directory to your + version control system's list of items not to track. + You can even remove the whole build tree with a single command without + risking removing any of the source code. 
- &SCons; provides two ways to do this, - one through the &f-link-SConscript; function that we've already seen, + The key to making this separation work is the ability to + do out-of-tree builds: building under a separate root + than the sources being built. + You set up out of tree builds by establishing what &SCons; + calls a variant directory, + a place where you can build a single variant of your software + (of course you can define more than one of these if you need to). + Since &SCons; tracks targets by their path, it is able to distinguish + build products like build/A/network.obj + of the Controller A build + from build/B/network.obj + of the Controller B build, + thus avoiding conflicts. + + + + + + &SCons; provides two ways to establish variant directories, + one through the &f-link-SConscript; function that we have already seen, and the second through a more flexible &f-link-VariantDir; function. - Historical note: the &VariantDir; function - used to be called &BuildDir;, a name which was - removed because the &SCons; functionality + The variant directory mechanism does support doing multiple builds + in one invocation of &SCons;, but the remainder of this chapter + will focus on setting up a single build. You can combine these + techniques with ones from the previous chapter and elsewhere + in this Guide to set up more complex scenarios. + + + + + + The &VariantDir; function used to be called &BuildDir;, + a name which was changed because it turned out to be confusing: + the &SCons; functionality differs from a familiar model of a "build directory" - implemented by other build systems like GNU Autotools. + implemented by certain other build systems like GNU Autotools. You might still find references to the old name on the Internet in postings about &SCons;, but it no longer works. - + -
+
Specifying a Variant Directory Tree as Part of an &SConscript; Call The most straightforward way to establish a variant directory tree - relies the fact that the usual way to + relies on the fact that the usual way to set up a build hierarchy is to have an - SConscript file in the source subdirectory. + &SConscript; file in the source directory. If you pass a &variant_dir; argument to the &f-link-SConscript; function call: @@ -130,7 +149,7 @@ &SCons; will then build all of the files in - the &build; subdirectory: + the &build; directory: @@ -143,16 +162,14 @@ - No files were built in &src;, they went to &build;. - The build output might show a bit of a surprise: + No files were built in &src;: the object file build/hello.o and the executable file build/hello - were built in the &build; subdirectory, - as expected. - But even though our &hello_c; file lives in the &src; subdirectory, - &SCons; has actually compiled a + were built in the &build; directory, as expected. + But notice that even though our &hello_c; file actually + lives in the &src; directory, &SCons; has compiled a build/hello.c file to create the object file, and that file is now seen in &build;. @@ -161,28 +178,57 @@ + You can ask &SCons; to show the dependency tree to illustrate + a bit more: + + + + + scons -Q --tree=prune + + + + What's happened is that &SCons; has duplicated - the &hello_c; file from the &src; subdirectory - to the &build; subdirectory, + the &hello_c; file from the &src; directory + to the &build; directory, and built the program from there (it also duplicated &SConscript;). The next section explains why &SCons; does this. + + + The nice thing about the &SConscript; approach is it is almost + invisible to you: + this build looks just like an ordinary in-place build + except for the extra &variant_dir; argument in the + &f-link-SConscript; call. + &SCons; handles all the path adjustments for the + out of tree &build; directory while it processes that SConscript file. + + +
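The example markup this section refers to is not reproduced in the diff; as a rough sketch of the kind of files being described, assuming the hello.c layout mentioned in the text:

# SConstruct: read src/SConscript, but direct everything it builds into build/
SConscript('src/SConscript', variant_dir='build')

# src/SConscript: written as an ordinary in-place build description
env = Environment()
env.Program('hello', 'hello.c')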
-
+
Why &SCons; Duplicates Source Files in a Variant Directory Tree - The important thing to understand is that when you set up a variant directory, - &SCons; performs the build in that directory. - It turns out it's easiest to ensure where build products end up - by just building in place. - Since the build is happening in a place different from where the - sources are, the most straightforward way to guarantee a correct build - is for &SCons; to copy them there. + When you set up a variant directory &SCons; conceptually behaves as + if you requested a build in that directory. + As noted in the previous chapter, + all builds actually happen from the top level directory, + but as an aid to understanding how &SCons; operates, think + of it as build in place in the variant directory, + not build in source but send build artifacts + to the variant directory. + It turns out in place builds are easier to get right than out + of tree builds - so by default &SCons; simulates an in place build + by making the variant directory look just like the source directory. + The most straightforward way to do that is by making copies + of the files needed for the build. @@ -192,7 +238,11 @@ in variant directories is simply that some tools (mostly older versions) are written to only build their output files - in the same directory as the source files. + in the same directory as the source files - such tools often don't + have any option to specify the output file, and the tool just + uses a predefined output file name, + or uses a derived variant of the source file name, + dropping the result in the same directory. In this case, the choices are either to build the output file in the source directory and move it to the variant directory, @@ -204,9 +254,9 @@ Additionally, relative references between files - can cause problems if we don't - just duplicate the hierarchy of source files - in the variant directory. + can cause problems which are resolved by + just duplicating the hierarchy of source files + into the variant directory. You can see this at work in use of the C preprocessor #include mechanism with double quotes, not angle brackets: @@ -240,8 +290,8 @@ Although source-file duplication guarantees a correct build - even in these end-cases, - it can usually be safely disabled. + even in these edge cases, + it can usually be safely disabled. The next section describes how you can disable the duplication of source files in the variant directory. @@ -250,19 +300,19 @@
-
+
Telling &SCons; to Not Duplicate Source Files in the Variant Directory Tree In most cases and with most tool sets, - &SCons; can place its target files in a build subdirectory + &SCons; can use sources directly from the source directory without - duplicating the source files + duplicating them into the variant directory before building, and everything will work just fine. - You can disable the default &SCons; behavior + You can disable the default &SCons; duplication behavior by specifying duplicate=False - when you call the &SConscript; function: + when you call the &f-link-SConscript; function: @@ -272,11 +322,11 @@ - When this flag is specified, - &SCons; uses the variant directory - like most people expect--that is, - the output files are placed in the variant directory - while the source files stay in the source directory: + When this flag is specified, the results of a build + look more like the mental model people may have from other + build systems - that is, + the output files end up in the variant directory + while the source files do not. @@ -292,15 +342,23 @@ hello.o + + + If disabling duplication causes any problems, + just return to the more cautious approach by letting + &SCons; go back to duplicating files. + + +
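The flag described here goes on the same kind of &SConscript; call discussed earlier in the chapter; a minimal sketch:

# build into build/ as before, but compile directly from the files in src/
SConscript('src/SConscript', variant_dir='build', duplicate=False)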
-
+
The &VariantDir; Function - Use the &VariantDir; function to establish that target - files should be built in a separate directory + You can also use the &f-link-VariantDir; function to establish + that target files should be built in a separate directory tree from the source files: @@ -318,13 +376,18 @@ - Note that when you're not using - an &SConscript; file in the &src; subdirectory, - you must actually specify that - the program must be built from - the build/hello.c - file that &SCons; will duplicate in the - &build; subdirectory. + When using this form, you have to tell &SCons; that + sources and targets are in the variant directory, + and those references will trigger the remapping, + necessary file copying, etc. for an already established + variant directory. Here is the same example in a more + spelled out form to show this more clearly: + + +VariantDir('build', 'src') +env = Environment() +env.Program(target='build/hello', source=['build/hello.c']) + @@ -345,7 +408,7 @@ You can specify the same duplicate=False argument - that you can specify for an &SConscript; call: + that you can specify for an &f-link-SConscript; call: @@ -375,13 +438,13 @@
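A small sketch along the same lines as the spelled-out example in the hunk above, adding the duplicate=False form that the prose mentions; treat it as an illustration rather than the guide's own example markup:

VariantDir('build', 'src', duplicate=False)
env = Environment()
# targets and sources are still named by their variant-directory paths;
# with duplication off, SCons reads src/hello.c directly rather than
# copying it to build/hello.c first
env.Program(target='build/hello', source=['build/hello.c'])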
-
+
Using &VariantDir; With an &SConscript; File - Even when using the &VariantDir; function, - it's more natural to use it with + Even when using the &f-link-VariantDir; function, + it is more natural to use it with a subsidiary &SConscript; file, because then you don't have to adjust your individual build instructions to use the variant directory path. @@ -428,21 +491,26 @@ - Notice that this is completely equivalent - to the use of &SConscript; that we - learned about in the previous section. + This is completely equivalent + to the use of &f-link-SConscript; with the + variant_dir argument + from earlier in this chapter, + but did require callng the SConscript using the already established + variant directory path to trigger that behavior. + If you call SConscript('src/SConscript') + you would get a normal in-place build in &src;.
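Put together, the arrangement this section describes amounts to the following; a minimal sketch matching the build and src paths used in the surrounding text:

# SConstruct
VariantDir('build', 'src')
# naming the SConscript by its variant-directory path is what triggers
# the remapping; SConscript('src/SConscript') would instead give a
# normal in-place build in src/
SConscript('build/SConscript')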
-
+
Using &Glob; with &VariantDir; The &f-link-Glob; file name pattern matching function - works just as usual when using &VariantDir;. + works just as usual when using &f-link-VariantDir;. For example, if the src/SConscript looks like this: @@ -496,7 +564,7 @@ + %scons; @@ -13,36 +19,12 @@ %variables-mod; ]> -
-Variant Build Examples - +Variant Build Examples diff -Nru scons-4.4.0+dfsg/.editorconfig scons-4.5.2+dfsg/.editorconfig --- scons-4.4.0+dfsg/.editorconfig 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.editorconfig 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,26 @@ +# EditorConfig file for SCons project + +root = true + +[*] +indent_style = space +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true +end_of_line = lf +charset = utf-8 + +[*.py] +max_line_length = 88 +ensure_newline_before_comments = true +include_trailing_comma = true +use_parentheses = true + +[*.xml] +indent_size = 2 + +[*.rst] +indent_size = 3 + +[*.bat] +end_of_line = crlf diff -Nru scons-4.4.0+dfsg/.flake8 scons-4.5.2+dfsg/.flake8 --- scons-4.4.0+dfsg/.flake8 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.flake8 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,22 @@ +[flake8] +show-source = True +# don't complain about work black has done +max-line-length = 88 +extend-exclude = + bin, + bench, + doc, + src, + template, + testing, + test, + timings, + SCons/Tool/docbook/docbook-xsl-1.76.1, + bootstrap.py, + runtest.py +extend-ignore = + E302, + E305 +per-file-ignores = + # module symbols made available for compat - ignore "unused" warns + SCons/Util/__init__.py: F401 diff -Nru scons-4.4.0+dfsg/.git-blame-ignore-revs scons-4.5.2+dfsg/.git-blame-ignore-revs --- scons-4.4.0+dfsg/.git-blame-ignore-revs 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.git-blame-ignore-revs 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +# files reformatted from DOS line-endings +1277d8e5ab6457ed18d291100539f31d1bdb2d7c diff -Nru scons-4.4.0+dfsg/.github/workflows/experimental_tests.yml scons-4.5.2+dfsg/.github/workflows/experimental_tests.yml --- scons-4.4.0+dfsg/.github/workflows/experimental_tests.yml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.github/workflows/experimental_tests.yml 2023-03-21 16:17:04.000000000 +0000 @@ -19,8 +19,11 @@ build: strategy: + fail-fast: false matrix: - os: ['ubuntu-latest', 'windows-latest'] + # note: in the 2nd half of 2022 the setup-mingw was often failing on + # windows-latest. 
revisit someday (perhaps when there's an @v3) + os: ['ubuntu-latest', 'windows-2019', 'macos-latest'] # The type of runner that the job will run on runs-on: ${{ matrix.os }} @@ -32,19 +35,21 @@ - name: Set up MinGW uses: egor-tensin/setup-mingw@v2 - if: matrix.os == 'windows-latest' + if: matrix.os == 'windows-2019' with: platform: x64 - - name: Set up Python 3.8 ${{ matrix.os }} + - name: Set up Python 3.11 ${{ matrix.os }} uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: '3.11' + - name: Install dependencies including ninja ${{ matrix.os }} run: | python -m pip install --upgrade pip setuptools wheel python -m pip install ninja psutil # sudo apt-get update + - name: Test experimental packages ${{ matrix.os }} run: | python runtest.py test/import.py test/ninja diff -Nru scons-4.4.0+dfsg/.github/workflows/framework_tests.yml scons-4.5.2+dfsg/.github/workflows/framework_tests.yml --- scons-4.4.0+dfsg/.github/workflows/framework_tests.yml 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/.github/workflows/framework_tests.yml 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,42 @@ +name: Test Framework Tests + +on: + # PR events only on master + push: + branches: + - 'master' + paths: + - 'testing/framework/*' + + pull_request: + branches: + - 'master' + paths: + - 'testing/framework/*' + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + fwtest: + strategy: + fail-fast: false + matrix: + os: ['ubuntu-latest', 'windows-latest'] + + # The type of runner that the job will run on + runs-on: ${{ matrix.os }} + + steps: + # Checkouut repository under $GITHUB_WORKSPACE + - uses: actions/checkout@v2 + + - name: Set up Python 3.11 ${{ matrix.os }} + uses: actions/setup-python@v2 + with: + python-version: '3.11' + + - name: Test test framework ${{ matrix.os }} + run: | + python runtest.py testing/framework + diff -Nru scons-4.4.0+dfsg/.github/workflows/runtest.yml scons-4.5.2+dfsg/.github/workflows/runtest.yml --- scons-4.4.0+dfsg/.github/workflows/runtest.yml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.github/workflows/runtest.yml 2023-03-21 16:17:04.000000000 +0000 @@ -20,7 +20,7 @@ strategy: matrix: - os: ['ubuntu-latest'] + os: ['ubuntu-22.04'] # The type of runner that the job will run on runs-on: ${{ matrix.os }} @@ -30,21 +30,24 @@ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v2 - - name: Set up Python 3.8 ${{ matrix.os }} + - name: Set up Python 3.10 ${{ matrix.os }} uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: '3.10' + - name: Install dependencies including ninja ${{ matrix.os }} run: | python -m pip install --upgrade pip setuptools wheel - python -m pip install ninja psutil + python -m pip install -r requirements-dev.txt # sudo apt-get update + - name: runtest ${{ matrix.os }} run: | - python runtest.py -a -j 2 - - name: Archive Failed tests + python runtest.py --all --time --jobs=2 + + - name: Archive Failed tests ${{ matrix.os }} uses: actions/upload-artifact@v2 with: - name: linux-failed-tests + name: ${{ matrix.os }}-failed-tests path: | failed_tests.log diff -Nru scons-4.4.0+dfsg/.github/workflows/scons-package.yml scons-4.5.2+dfsg/.github/workflows/scons-package.yml --- scons-4.4.0+dfsg/.github/workflows/scons-package.yml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.github/workflows/scons-package.yml 2023-03-21 16:17:04.000000000 +0000 @@ -15,19 +15,22 @@ steps: - uses: actions/checkout@v2 - - 
name: Set up Python 3.8 + + - name: Set up Python 3.10 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: '3.10' + - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel #python -m pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + if [ -f requirements-pkg.txt ]; then pip install -r requirements-pkg.txt; elif [ -f requirements.txt ]; then pip install -r requirements.txt; fi sudo apt-get update sudo apt-get -y install docbook-xml docbook-xsl xsltproc fop docbook-xsl-doc-pdf - # try to keeo the texlive install as small as we can to save some time/space + # try to keep the texlive install as small as we can to save some time/space sudo apt-get -y --no-install-recommends install texlive biber texmaker ghostscript texlive-latex-base texlive-latex-extra texlive-bibtex-extra texlive-font-utils latexmk + # This is disabled until the run can be configured to only # check the code that matters, else we fail on non-essentials # like the bench/ stuff. @@ -37,14 +40,17 @@ # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + - name: Update Doc sources (some parts are generated) run: | python bin/docs-update-generated.py python bin/docs-validate.py python bin/docs-create-example-outputs.py + - name: Build SCons packages run: | python scripts/scons.py + - name: Verify package run: | ls -l build/dist diff -Nru scons-4.4.0+dfsg/packaging/debian/changelog scons-4.5.2+dfsg/packaging/debian/changelog --- scons-4.4.0+dfsg/packaging/debian/changelog 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/changelog 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,330 @@ +scons (4.4.0) unstable; urgency=low + + * Feature release + + -- William Deegan Sat, 30 Jul 2022 14:08:29 -0700 + +scons (4.3.0) unstable; urgency=low + + * Feature release + + -- William Deegan Tue, 16 Nov 2021 18:12:46 -0700 + +scons (4.2.0) unstable; urgency=low + + * Feature release + + -- William Deegan Sat, 31 Jul 2021 18:12:46 -0700 + +scons (4.1.0) unstable; urgency=low + + * Feature release + + -- William Deegan Tues, 19 Jan 2021 15:04:42 -0700 + +scons (4.0.1) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Thu, 16 Jul 2020 15:04:42 -0700 + +scons (4.0.0) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Mon, 16 Dec 2019 15:04:42 -0700 + +scons (3.1.1) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Mon, 16 Dec 2019 15:04:42 -0700 + +scons (3.1.0) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Sat, 20 Jul 2019 15:04:42 -0700 + +scons (3.0.4) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Sun, 20 Jan 2019 19:44:18 -0700 + +scons (3.0.3) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Sat, 07 Jan 2019 19:44:18 -0700 + +scons (3.0.2) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Mon, 31 Dec 2018 15:14:21 -0700 + +scons (3.0.1) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Mon, 12 Nov 2017 15:31:33 -0700 + +scons (3.0.0) unstable; urgency=low + + * Feature Release + + -- William Deegan Mon, 18 Sep 2017 08:32:04 -0700 + +scons (2.5.1) unstable; urgency=low + + * Maintenance Release + + -- William Deegan Mon, 03 Nov 2016 13:37:42 -0700 + +scons (2.5.0) unstable; 
urgency=low + + * Feature Release + + -- William Deegan Sat, 09 Apr 2016 08:56:00 -0700 + +scons (2.4.1) unstable; urgency=low + + * Maintenance release. + + -- William Deegan Sat, 07 Nov 2015 08:56:00 -0700 + +scons (2.4.0) unstable; urgency=low + + * Maintenance release. + + -- William Deegan Mon, 21 Sep 2015 08:56:00 -0700 + +scons (2.3.6) unstable; urgency=low + + * Maintenance release. + + -- William Deegan Mon, 17 Jun 2015 21:07:32 -0700 + +scons (2.3.5) unstable; urgency=low + + * Maintenance release. + + -- William Deegan Mon, 17 Jun 2015 21:07:32 -0700 + +scons (2.3.4) unstable; urgency=low + + * Maintenance release. + + -- Gary Oberbrunner Sun, 27 Sep 2014 21:00:00 -0500 + +scons (2.3.3) unstable; urgency=low + + * Maintenance release. + + -- Gary Oberbrunner Sun, 24 Aug 2014 21:00:00 -0500 + +scons (2.3.2) unstable; urgency=low + + * Maintenance release. + + -- Gary Oberbrunner Fri, 4 July 2014 21:00:00 -0500 + +scons (2.3.0) unstable; urgency=low + + * Maintenance release. + + -- Gary Oberbrunner Sat, 9 Feb 2013 21:00:00 -0500 + + +scons (2.2.0) unstable; urgency=low + + * Maintenance release. + + -- Gary Oberbrunner Sun, 12 Aug 2012 09:00:00 -0500 + + +scons (0.97) unstable; urgency=low + + * Eighth beta release. + + -- Steven Knight Thu, 17 May 2007 08:59:41 -0500 + + +scons (0.96-1) unstable; urgency=low + + * Seventh beta release. + + -- Steven Knight Wed, 18 Aug 2004 13:36:40 +0000 + + +scons (0.95-1) unstable; urgency=low + + * Sixth beta release. + + -- Steven Knight Mon, 08 Mar 2004 06:43:20 -0600 + + +scons (0.94-1) unstable; urgency=low + + * Fifth beta release. + + -- Steven Knight Fri, 07 Nov 2003 05:29:48 -0600 + + +scons (0.93-1) unstable; urgency=low + + * Fourth beta release. + + -- Steven Knight Thu, 23 Oct 2003 07:26:55 -0500 + + +scons (0.92-1) unstable; urgency=low + + * Third beta release. + + -- Steven Knight Wed, 20 Aug 2003 03:45:28 -0500 + + +scons (0.91-1) unstable; urgency=low + + * Second beta release. + + -- Steven Knight Thu, 14 Aug 2003 13:00:44 -0500 + + +scons (0.90-1) unstable; urgency=low + + * First beta release. + + -- Steven Knight Wed, 25 Jun 2003 14:24:52 -0500 + + +scons (0.14-1) unstable; urgency=low + + * Eighth post-official-Debian build cycle + + -- Steven Knight Wed, 21 May 2003 05:16:32 -0500 + + +scons (0.13-1) unstable; urgency=low + + * Seventh post-official-Debian build cycle + + -- Steven Knight Mon, 31 Mar 2003 20:22:00 -0600 + + +scons (0.12-1) unstable; urgency=low + + * Sixth post-official-Debian build cycle + + -- Steven Knight Thu, 27 Mar 2003 23:52:09 -0600 + + +scons (0.11-1) unstable; urgency=low + + * Fifth post-official-Debian build cycle + + -- Steven Knight Tue, 11 Feb 2003 05:24:33 -0600 + + +scons (0.10-1) unstable; urgency=low + + * Fourth post-official-Debian build cycle + + -- Steven Knight Thu, 16 Jan 2003 04:11:46 -0600 + + +scons (0.09-1) unstable; urgency=low + + * Third post-official-Debian build cycle + + -- Steven Knight Thu, 5 Dec 2002 04:48:25 -0600 + + +scons (0.08-1) unstable; urgency=low + + * New upstream release (Closes: #158373) + + -- Moshe Zadka Mon, 21 Oct 2002 16:52:11 +0200 + +scons (0.07-3) unstable; urgency=low + + * Removing /usr/doc links + * Python 2.1 -> Python 2.2 + + -- Moshe Zadka Tue, 27 Aug 2002 18:48:02 +0300 + +scons (0.07-2) unstable; urgency=low + + * Ugh, who would put distutils.core in python-dev? 
Changing build + dependancies (Closes: #146601) + * While we're at it, update Standards-Version + + -- Moshe Zadka Sat, 11 May 2002 08:25:07 +0300 + +scons (0.07-1) unstable; urgency=low + + * New upstream version + * Packaging properly, instead of as debian native + * Modifying copyright to include upstream location (Closes: #144491) + * Fixing my e-mail address (Closes: #144490) + * Thanks for looking after my packages, tbm ;-) + + -- Moshe Zadka Sat, 4 May 2002 13:05:53 +0300 + +scons (0.06-1) unstable; urgency=low + + * New upstream version + + -- Moshe Zadka Mon, 15 Apr 2002 19:22:09 +0300 + +scons (0.05-1) unstable; urgency=low + + * First upload (closes: #140128) + + -- Moshe Zadka Wed, 27 Mar 2002 10:40:42 +0200 + + +scons (0.06-1) unstable; urgency=low + + * Sixth Release. + + -- Steven Knight Thu, 28 Mar 2002 01:24:29 -0600 + + +scons (0.05-1) unstable; urgency=low + + * Fifth Release. + + -- Steven Knight Thu, 21 Feb 2002 16:50:03 -0600 + + +scons (0.04-1) unstable; urgency=low + + * Fourth Release. + + -- Steven Knight Wed, 30 Jan 2002 11:09:42 -0600 + + +scons (0.03-1) unstable; urgency=low + + * Third Release. + + -- Steven Knight Fri, 11 Jan 2002 01:09:30 -0600 + + +scons (0.02-1) unstable; urgency=low + + * Second Release. + + -- Steven Knight Fri, 14 Dec 2001 13:23:03 -0600 + + +scons (0.01-1) unstable; urgency=low + + * Initial Release. + + -- Anthony Roach Mon, 1 Oct 2001 23:24:45 -0500 + + diff -Nru scons-4.4.0+dfsg/packaging/debian/compat scons-4.5.2+dfsg/packaging/debian/compat --- scons-4.4.0+dfsg/packaging/debian/compat 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/compat 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1 @@ +5 diff -Nru scons-4.4.0+dfsg/packaging/debian/control scons-4.5.2+dfsg/packaging/debian/control --- scons-4.4.0+dfsg/packaging/debian/control 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/control 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,23 @@ +Source: scons +Section: devel +Priority: optional +Maintainer: Moshe Zadka +Build-Depends-Indep: debhelper (>> 5.0.0), python-dev (>> 2.7) +Standards-Version: 3.5.6 + +Package: scons +Architecture: all +Depends: python (>> 2.7) +Description: A replacement for Make + SCons is an Open Source software construction tool--that is, a build + tool; an improved substitute for the classic Make utility; a better + way to build software. SCons is based on the design which won the + Software Carpentry build tool design competition in August 2000. SCons + "configuration files" are Python scripts that call an API to establish + dependencies and specify how targets are built. SCons maintains a global + view of all dependencies in a tree, and can scan source (or other) files + for implicit dependencies, such as files specified on #include lines. + SCons uses MD5 signatures to rebuild only when the contents of a file + have really changed, not just when the timestamp has been touched. + SCons supports side-by-side variant builds, and is easily extended with + user-defined Builder and/or Scanner objects. diff -Nru scons-4.4.0+dfsg/packaging/debian/copyright scons-4.5.2+dfsg/packaging/debian/copyright --- scons-4.4.0+dfsg/packaging/debian/copyright 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/copyright 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,66 @@ +This package was debianized by Moshe Zadka on +Wed, 27 Mar 2002 10:40:28 +0200. 
+ +Upstream Author(s): +Steven Knight +knight at baldmt dot com +http://www.baldmt.com/~knight/ + +With plenty of help from the SCons Development team: + Chad Austin + Charles Crain + Steve Leblanc + Anthony Roach + + +Copyright: + +The software comes with the following notice in the file LICENSE.txt: +__COPYRIGHT__ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +This package was debianized by Anthony Roach + +The package source can be downloaded from http://www.scons.org/ + +Upstream Author: Steven Knight + +__COPYRIGHT__ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + diff -Nru scons-4.4.0+dfsg/packaging/debian/dirs scons-4.5.2+dfsg/packaging/debian/dirs --- scons-4.4.0+dfsg/packaging/debian/dirs 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/dirs 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,3 @@ +usr/bin +usr/share/doc/scons +usr/share/man/man1 diff -Nru scons-4.4.0+dfsg/packaging/debian/docs scons-4.5.2+dfsg/packaging/debian/docs --- scons-4.4.0+dfsg/packaging/debian/docs 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/docs 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1 @@ +usr/share/doc/scons/* diff -Nru scons-4.4.0+dfsg/packaging/debian/postinst scons-4.5.2+dfsg/packaging/debian/postinst --- scons-4.4.0+dfsg/packaging/debian/postinst 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/postinst 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,19 @@ +#!/bin/sh +set -e +if [ "$1" = "configure" ]; then + if [ -d /usr/doc -a ! 
-e /usr/doc/scons -a -d /usr/share/doc/scons ]; then + ln -sf ../share/doc/scons /usr/doc/scons + fi +fi + +NAME=scons +PYTHON_VERSION=`python -V 2>&1| cut -d" " -f2 | cut -d"." -f1-2` +PYTHON_EXE_NAME=`which python$PYTHON_VERSION` +case "$1" in + configure|abort-upgrade|abort-remove|abort-deconfigure) + dpkg --listfiles $NAME | grep '\.py$' | \ + xargs -n 1 $PYTHON_EXE_NAME -c 'import py_compile,sys;py_compile.compile(sys.argv[1])' + dpkg --listfiles $NAME | grep '\.py$' | \ + xargs -n 1 $PYTHON_EXE_NAME -O -c 'import py_compile,sys;py_compile.compile(sys.argv[1])' + ;; +esac diff -Nru scons-4.4.0+dfsg/packaging/debian/prerm scons-4.5.2+dfsg/packaging/debian/prerm --- scons-4.4.0+dfsg/packaging/debian/prerm 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/prerm 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,12 @@ +#!/bin/sh +set -e + +NAME=scons + +dpkg --listfiles $NAME | + awk '$0~/\.py$/ {print $0"c\n" $0"o"}' | + xargs rm -f >&2 + +if [ \( "$1" = "upgrade" -o "$1" = "remove" \) -a -L /usr/doc/scons ]; then + rm -f /usr/doc/scons +fi diff -Nru scons-4.4.0+dfsg/packaging/debian/rules scons-4.5.2+dfsg/packaging/debian/rules --- scons-4.4.0+dfsg/packaging/debian/rules 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/debian/rules 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,113 @@ +#!/usr/bin/make -f +# Sample debian/rules that uses debhelper. +# GNU copyright 1997 to 1999 by Joey Hess. + +######## +# Overridable variables added to support building test .deb files +# as part of routine SCons builds. --SK +BUILDDEB_OPTIONS= +PYTHON_VERSION=`python -V 2>&1| cut -d" " -f2 | cut -d"." -f1-2` +PYTHON_PATH=/usr/bin/python +PYTHON=$(PYTHON_PATH)$(PYTHON_VERSION) +####### + +# Uncomment this to turn on verbose mode. +#export DH_VERBOSE=1 + +# This is the debhelper compatability version to use. +configure: configure-stamp +configure-stamp: + dh_testdir + # Add here commands to configure the package. + + + touch configure-stamp + +build: configure-stamp build-stamp +build-stamp: + dh_testdir + $(PYTHON) setup.py build + touch build-stamp + +clean: + dh_testdir + dh_testroot + rm -f build-stamp configure-stamp + rm -rf build/ + dh_clean + +install: build + dh_testdir + dh_testroot + dh_prep + dh_installdirs + + # Add here commands to install the package into debian/scons. + @######## + @# The SCons project uses Aegis for development, which requires + @# that targets be removed explicitly before they're created. + @# (They could be symlinks to checked-in read-only copies in the + @# repository.) We also can't assume that the proper directories + @# already exist on our non-Debian test build systems. Hence, + @# we do a lot of mkdir -p and rm -f here... 
--SK + @######## + mkdir -p debian/scons/usr/lib/python$(PYTHON_VERSION)/site-packages/ + rm -rf debian/scons/usr/lib/python$(PYTHON_VERSION)/site-packages/SCons + cp -r build/lib*/SCons debian/scons/usr/lib/python$(PYTHON_VERSION)/site-packages/ + + mkdir -p debian/scons/usr/bin/ + rm -f debian/scons/usr/bin/scons + rm -f debian/scons/usr/bin/sconsign +ifeq ($(PYTHON),python) + cp build/scripts/scons debian/scons/usr/bin/scons + cp build/scripts/sconsign debian/scons/usr/bin/sconsign +else + sed '1s|.*|#!/usr/bin/python2.2|' build/scripts/scons > debian/scons/usr/bin/scons + sed '1s|.*|#!/usr/bin/python2.2|' build/scripts/sconsign > debian/scons/usr/bin/sconsign +endif + chmod +x debian/scons/usr/bin/scons + chmod +x debian/scons/usr/bin/sconsign + + mkdir -p debian/scons/usr/share/man/man1/ + rm -f debian/scons/usr/share/man/man1/scons.1 + rm -f debian/scons/usr/share/man/man1/sconsign.1 + cp scons.1 debian/scons/usr/share/man/man1/ + cp sconsign.1 debian/scons/usr/share/man/man1/ + + mkdir -p debian/scons/usr/share/doc/scons + rm -f debian/scons/usr/share/doc/scons/changelog + rm -f debian/scons/usr/share/doc/scons/README.txt + rm -f debian/scons/usr/share/doc/scons/CHANGES.txt + rm -f debian/scons/usr/share/doc/scons/*.gz + rm -f debian/scons/usr/share/doc/scons/copyright + + cp README.txt debian/scons/usr/share/doc/scons/ + cp CHANGES.txt debian/scons/usr/share/doc/scons/ + + gzip -9 debian/scons/usr/share/doc/scons/* + + cp debian/changelog debian/scons/usr/share/doc/scons/changelog + + cp debian/copyright debian/scons/usr/share/doc/scons/ + +# Build architecture-independent files here. +binary-indep: build install + dh_testdir + dh_testroot + #dh_installdocs + dh_installchangelogs + dh_link + dh_strip + dh_compress + dh_fixperms + dh_installdeb + dh_shlibdeps + dh_gencontrol + dh_md5sums + dh_builddeb $(BUILDDEB_OPTIONS) + +# Build architecture-dependent files here. +binary-arch: build install + +binary: binary-indep binary-arch +.PHONY: build clean binary-indep binary-arch binary install configure diff -Nru scons-4.4.0+dfsg/packaging/etc/README.txt scons-4.5.2+dfsg/packaging/etc/README.txt --- scons-4.4.0+dfsg/packaging/etc/README.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/etc/README.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,10 @@ +This directory contains a number of scripts/files useful when building/packaging SCons + +To force SCons to propagate SOURCE_DATE_EPOCH from the shell running SCons we're providing +a script to create a ~/.scons/site_scons/site_init.py. +Note that reproducible_install.sh will NOT overwrite an existing ~/.scons/site_scons/site_init.py +This supports https://reproducible-builds.org/specs/source-date-epoch/ +If you wanted to include this in your build tree you would place it in site_scons/site_init.py relative +to your SConstruct. +* reproducible_install.sh +* reproducible_site_init.py \ No newline at end of file diff -Nru scons-4.4.0+dfsg/packaging/etc/reproducible_install.sh scons-4.5.2+dfsg/packaging/etc/reproducible_install.sh --- scons-4.4.0+dfsg/packaging/etc/reproducible_install.sh 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/etc/reproducible_install.sh 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +TARGET_FILE=~/.scons/site_scons/site_init.py + +mkdir -p ~/.scons/site_scons + +if [ !
-f "${TARGET_FILE}" ] +then + echo "File ${TARGET_FILE} does not exist" + echo "We will add one which supports reproducible builds" + cp ${SCRIPT_DIR}/reproducible_site_init.py ${TARGET_FILE} +else + echo "File ${TARGET_FILE} already exists" + echo "We will not overwrite it. Please copy the content" + echo "from ${SCRIPT_DIR}/reproducible_site_init.py" +fi diff -Nru scons-4.4.0+dfsg/packaging/etc/reproducible_site_init.py scons-4.5.2+dfsg/packaging/etc/reproducible_site_init.py --- scons-4.4.0+dfsg/packaging/etc/reproducible_site_init.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/etc/reproducible_site_init.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,24 @@ +""" +Use this file as your ~/.scons/site_scons/site_init.py to enable reproducible builds as described at +https://reproducible-builds.org/specs/source-date-epoch/ +""" + +import os +import SCons.Environment + +old_init = SCons.Environment.Base.__init__ + +print("Adding logic to propagate SOURCE_DATE_EPOCH from the shell environment when building with SCons") + + +def new_init(self, **kw): + """ + This logic will add SOURCE_DATE_EPOCH to the execution environment used to run + all the build commands. + """ + old_init(self, **kw) + if 'SOURCE_DATE_EPOCH' in os.environ: + self._dict['ENV']['SOURCE_DATE_EPOCH'] = os.environ['SOURCE_DATE_EPOCH'] + + +SCons.Environment.Base.__init__ = new_init diff -Nru scons-4.4.0+dfsg/packaging/rpm/scons.spec.in scons-4.5.2+dfsg/packaging/rpm/scons.spec.in --- scons-4.4.0+dfsg/packaging/rpm/scons.spec.in 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/packaging/rpm/scons.spec.in 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,54 @@ +%define name scons +%define version __VERSION__ +%define release 1 +%define _unpackaged_files_terminate_build 0 + +Summary: an Open Source software construction tool +Name: %{name} +Version: %{version} +Release: %{release} +Source0: %{name}-%{version}.tar.gz +#Copyright: The SCons Foundation +License: MIT, freely distributable +Group: Development/Tools +BuildRoot: %{_tmppath}/%{name}-buildroot +Prefix: %{_prefix} +BuildArchitectures: noarch +Vendor: The SCons Development Team +Packager: The SCons Development Team +Requires: python >= 2.4 +Url: http://www.scons.org/ + +%description +SCons is an Open Source software construction tool--that is, a build +tool; an improved substitute for the classic Make utility; a better way +to build software. SCons is based on the design which won the Software +Carpentry build tool design competition in August 2000. + +SCons "configuration files" are Python scripts, eliminating the need +to learn a new build tool syntax. SCons maintains a global view of +all dependencies in a tree, and can scan source (or other) files for +implicit dependencies, such as files specified on #include lines. SCons +uses MD5 signatures to rebuild only when the contents of a file have +really changed, not just when the timestamp has been touched. SCons +supports side-by-side variant builds, and is easily extended with user- +defined Builder and/or Scanner objects.
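[Editorial note, not part of the upstream patch.] The reproducible_site_init.py shown above works by wrapping SCons.Environment.Base.__init__ so that SOURCE_DATE_EPOCH is copied from the invoking shell into each construction environment's execution environment (env['ENV']). A minimal sketch of how a user might confirm the propagation, assuming that site_init.py has been installed; the SConstruct below is illustrative only::

    # SConstruct: illustrative check, not part of the SCons sources
    env = Environment()
    # With the site_init.py active and SOURCE_DATE_EPOCH exported by the shell,
    # the value should appear in the execution environment:
    print("SOURCE_DATE_EPOCH:", env['ENV'].get('SOURCE_DATE_EPOCH'))

A reproducible build is then typically driven along the lines of ``SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) scons -Q``; the exact invocation depends on the project.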
+ +%prep +%setup + +%build +python setup.py build + +%install +python setup.py install --root=$RPM_BUILD_ROOT --install-lib=/usr/lib/scons --install-scripts=/usr/bin --install-data=/usr/share + +%clean +rm -rf $RPM_BUILD_ROOT + +%files +%defattr(-,root,root) +__RPM_FILES__ +%doc %{_mandir}/man1/scons.1* +%doc %{_mandir}/man1/sconsign.1* +%doc %{_mandir}/man1/scons-time.1* diff -Nru scons-4.4.0+dfsg/pyproject.toml scons-4.5.2+dfsg/pyproject.toml --- scons-4.4.0+dfsg/pyproject.toml 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/pyproject.toml 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,13 @@ +[build-system] +build-backend = "setuptools.build_meta" +requires = ["setuptools"] + +# for black and mypy, set the lowest Python version supported +[tool.black] +quiet = true +target-version = ['py36'] +skip-string-normalization = true + +[mypy] +python_version = 3.6 + diff -Nru scons-4.4.0+dfsg/README.rst scons-4.5.2+dfsg/README.rst --- scons-4.4.0+dfsg/README.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/README.rst 2023-03-21 16:17:04.000000000 +0000 @@ -117,7 +117,7 @@ ========================= SCons has no installation dependencies beyond a compatible version -of Python. The tools which will be used to to actually construct the +of Python. The tools which will be used to actually construct the project, such as compilers, documentation production tools, etc. should of course be installed by the appropriate means. @@ -258,6 +258,14 @@ is available at https://www.scons.org/donate.html or the GitHub Sponsors button on https://github.com/scons/scons. +Reproducible Builds +=================== +In order to support those users who wish to produce reproducible builds +(https://reproducible-builds.org/specs/source-date-epoch/), we're now including +logic to force SCons to propagate SOURCE_DATE_EPOCH from your shell environment for +all SCons builds. To support this, we're now providing an example +site_init.py and a script to install it in your ~/.scons. See packaging/etc/README.txt +for more info. For More Information ==================== diff -Nru scons-4.4.0+dfsg/README-SF.rst scons-4.5.2+dfsg/README-SF.rst --- scons-4.4.0+dfsg/README-SF.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/README-SF.rst 2023-03-21 16:17:04.000000000 +0000 @@ -136,7 +136,7 @@ - Install scripts named "scons" and "sconsign" scripts in the default system script directory (/usr/bin or C:\\Python\*\\Scripts, for example). -- Install "scons-3.1.2.exe" and "scons.exe" executables in the Python +- Install "scons-4.5.1.exe" and "scons.exe" executables in the Python prefix directory on Windows (C:\\Python\*, for example). - Install the SCons build engine (a Python module) in the standard Python library directory @@ -378,7 +378,7 @@ Building Packages ================= -We use SCons (version 3.1.2 or later) to build its own packages. If you +We use SCons to build its own packages.
If you already have an appropriate version of SCons installed on your system, you can build everything by simply running it:: @@ -393,12 +393,12 @@ Depending on the utilities installed on your system, any or all of the following packages will be built:: - SCons-4.2.0-py3-none-any.whl - SCons-4.3.0ayyyymmdd.tar.gz - SCons-4.3.0ayyyymmdd.zip - scons-doc-4.3.0ayyyymmdd.tar.gz - scons-local-4.3.0ayyyymmdd.tar.gz - scons-local-4.3.0ayyyymmdd.zip + SCons-4.5.1-py3-none-any.whl + SCons-4.7.0ayyyymmdd.tar.gz + SCons-4.7.0ayyyymmdd.zip + scons-doc-4.7.0ayyyymmdd.tar.gz + scons-local-4.7.0ayyyymmdd.tar.gz + scons-local-4.7.0ayyyymmdd.zip The SConstruct file is supposed to be smart enough to avoid trying to build packages for which you don't have the proper utilities installed. @@ -612,5 +612,5 @@ \... and many others. -Copyright (c) 2001 - 2021 The SCons Foundation +Copyright (c) 2001 - 2023 The SCons Foundation diff -Nru scons-4.4.0+dfsg/ReleaseConfig scons-4.5.2+dfsg/ReleaseConfig --- scons-4.4.0+dfsg/ReleaseConfig 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/ReleaseConfig 2023-03-21 16:17:04.000000000 +0000 @@ -31,7 +31,7 @@ # 'final', the patchlevel is set to the release date. This value is # mandatory and must be present in this file. #version_tuple = (2, 2, 0, 'final', 0) -version_tuple = (4, 3, 0, 'a', 0) +version_tuple = (4, 5, 2, 'a', 0) # Python versions prior to unsupported_python_version cause a fatal error # when that version is used. Python versions prior to deprecate_python_version @@ -50,7 +50,7 @@ #month_year = 'December 2012' # If copyright years is not given, the release year is used as the end. -copyright_years = '2001 - 2021' +copyright_years = '2001 - 2023' # Local Variables: # tab-width:4 diff -Nru scons-4.4.0+dfsg/RELEASE.txt scons-4.5.2+dfsg/RELEASE.txt --- scons-4.4.0+dfsg/RELEASE.txt 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/RELEASE.txt 2023-03-21 16:17:04.000000000 +0000 @@ -1,264 +1,32 @@ -A new SCons release, 4.4.0, is now available -on the SCons download page: +A new SCons release, 4.5.2, is now available on the SCons download page: - https://scons.org/pages/download.html - -Here is a summary of the changes since 4.3.0: - -NOTE: If you build with Python 3.10.0 and then rebuild with 3.10.1 (or higher), you may - see unexpected rebuilds. This is due to Python internals changing which changed - the signature of a Python Action Function. - - -NEW FUNCTIONALITY ------------------ - -- Added MSVC_USE_SCRIPT_ARGS Environment variable which specifies command line arguments - to pass to the script specified by MSVC_USE_SCRIPT. -- Added Configure.CheckMember() checker to check if struct/class has the specified member. -- Added SHELL_ENV_GENERATORS construction variable. This variable should be set to a list - (or an iterable) which contains functions to be called in order - when constructing the execution environment (Generally this is the shell environment - variables). This allows the user to customize how (for example) PATH is constructed. - Note that these are called for every build command run by SCons. It could have considerable - performance impact if not used carefully. -- Added MSVC_USE_SETTINGS construction variable to pass a dictionary to configure the msvc compiler - system environment as an alternative to bypassing Visual Studio autodetection entirely. -- Added MSVC_SDK_VERSION construction variable which allows building with a specific Microsoft - SDK version. This variable is used with the msvc batch file determined via autodetection. 
Refer - to the documentation for additional requirements and validation details. -- Added MSVC_TOOLSET_VERSION construction variable which allows building with a specific toolset - version. This variable is used with the msvc batch file determined via autodetection. This - variable does not affect the autodetection and selection of msvc instances. The toolset version - is applied after an msvc instance is selected. This could be the default version of msvc. Refer - to the documentation for additional requirements and validation details. Addresses issue #3265, - issue #3664, and pull request #4149. -- Added MSVC_SPECTRE_LIBS construction variable which allows building with spectre-mitigated - Visual C++ libraries. This variable is used with the msvc batch file determined via autodetection. - Refer to the documentation for additional requirements and validation details. -- Added MSVC_SCRIPT_ARGS construction variable which specifies command line arguments that are - passed to the msvc batch file determined via autodetection. Refer to the documentation for - additional requirements and validation details. Addresses enhancement issue #4106. -- Ninja: Added new alias "shutdown-ninja-scons-daemon" to allow ninja to shutdown the daemon. - Also added cleanup to test framework to kill ninja scons daemons and clean ip daemon logs. - NOTE: Test for this requires python psutil module. It will be skipped if not present. -- Ninja: Added command line variable NINJA_CMD_ARGS that allows to pass through ninja command line args. - This can also be set in your Environment(). -- Added a global policy setting and an environment construction variable for specifying the - action to be taken when an msvc request cannot be satisfied. The available options are "error", - "exception", "warning", "warn", "ignore", and "suppress". The global policy variable may be - set and retrieved via the functions msvc_set_notfound_policy and msvc_get_notfound_policy, - respectively. These two methods may be imported from SCons.Tool.MSCommon. The environment - construction variable is MSVC_NOTFOUND_POLICY. When defined, the environment construction - variable overrides the global policy setting for a given environment. When the active policy - is "error" or "exception", an MSVCVersionNotFound exception is raised. When the active policy - is "warning" or "warn", a VisualCMissingWarning warning is issued and the constructed - environment is likely incomplete. When the active policy is "ignore" or "suppress", no action - is taken and the constructed environment is likely incomplete. As implemented, the default - global policy is "warning". The ability to set the global policy via an SCons command-line - option may be added in a future enhancement. -- Added a global policy setting and an environment construction variable for specifying the - action to be taken when msvc script errors are detected. The available options are "error", - "exception", "warning", "warn", "ignore", and "suppress". The global policy variable may be - set and retrieved via the functions msvc_set_scripterror_policy and msvc_get_scripterror_policy, - respectively. These two methods may be imported from SCons.Tool.MSCommon. The environment - construction variable is MSVC_SCRIPTERROR_POLICY. When defined, the environment construction - variable overrides the global policy setting for a given environment. When the active policy - is "error" or "exception", an MSVCScriptExecutionError exception is raised when msvc batch file - errors are detected. 
When the active policy is "warning" or "warn", an MSVCScriptExecutionWarning - warning is issued when msvc batch file errors are detected. When the active policy is "ignore" or - "suppress", msvc batch error messages are suppressed. As implemented, the default global policy - is "ignore". The ability to set the global policy via an SCons command-line option may be added - in a future enhancement. -- Added experimental function msvc_query_version_toolset to SCons.Tool.MSCommon. Given a version - specification, this function will return an msvc version and an msvc toolset version. The msvc - toolset version may be None. The msvc version and msvc toolset version can be used in the - environment construction variables MSVC_VERSION and MSVC_TOOLSET_VERSION, respectively. The - version specification may be an msvc version or an msvc toolset version. This is a proxy for - using an msvc toolset version to select an msvc instance. This function may be removed when an - msvc toolset version is used during msvc instance selection. -- Fortran: a new construction variable FORTRANCOMMONFLAGS is added which is - applied to all Fortran dialects, to enable global (all-dialect) settings. -- lex: two new construction variables are introduced (LEX_HEADER_ILE - and LEX_TABLES_FILE) as the preferred way of specifying extra files that - the tool can generate. -- yacc: two new construction variables are introduced (YACC_HEADER_FILE - and YACC_GRAPH_FILE) as the preferred way of specifying extra files that - the tool can generate (applies only when using GNU flex and GNU bison). - - -CHANGED/ENHANCED EXISTING FUNCTIONALITY ---------------------------------------- - -- On Windows, %AllUsersProfile%\scons\site_scons is now the default "system" - location for a site_scons directory. - %AllUsersProfile%\Application Data\scons\site_scons will continue to work. - There does not seem to be any existing convention to use an - "Application Data" subdirectory here. -- Action._subproc() can now be used as a python context manager to ensure that the - POpen object is properly closed. -- SCons help (-H) no longer prints the "ignored for compatibility" options, - which are still listed in the manpage. -- Help is now sensitive to the size of the terminal window: the width of the - help text will scale to wider (or narrower) terminals than 80 characters. -- Ninja: Changed generated build.ninja file to run SCons only build Actions via - a SCons Deamon. Added logic for starting and connecting to SCons daemon (currently - only used for ninja) -- The change to "content" and "content-timestamp" Decider names is reflected - in the User Guide as well, since the hash function may be other than md5 - (tidying up from earlier change) -- If the (experimental) SCONS_CACHE_MSVC_CONFIG feature is used, it will now - attempt a sanity check for the cached compiler information, and regenerate - it if needed. Previously, this cache would fail if a compiler upgrade caused - a change to internal paths (e.g. upgrading from 17.1 to 17.2 causes - a necessary path component in some of the cached vars to need to 14.32.31326 - instead of 14.31.31103), and the cache file needed to be manually removed. - The default cachefile name is now "scons_msvc_cache.json" rather than - ".scons_msvc_cache" so there should be no transition problem if using the - default; if using a custom cache file name, the cache should still be - manually removed if there are problems to transition to the new style. 
-- Ninja: Update ninja file generation to only create response files for build commands - which exceed MAXLINELENGTH -- Update the debug output written to stdout for MSVC initialization which is enabled - by setting SCONS_MSCOMMON_DEBUG=- to use the logging module. Also changed the debug - output format written to stdout to include more information about the source for each - message of MSVC initialization debugging output. A single space was added before the - message for all debugging output records written to stdout and to files. -- Ninja: Made ninja tool force the ninja file as the only target. Also improved the default - targets setup and made sure there is always a default target for - the ninja file, which excludes targets that start and stop the daemon. -- Ninja: Update ninja tool so targets passed to SCons are propgated to ninja when scons - automatically executes ninja. -- Add JavaScanner to include JAVACLASSPATH as a dependency when using the Java tool. -- The build argument (i.e., x86) is no longer passed to the MSVC 6.0 to 7.1 batch - files. This may improve the effectiveness of the internal msvc cache when using - MSVC detection and when bypassing MSVC detection as the MSVC 6.0 to 7.1 batch files - do not expect any arguments. -- Propagate the OS and windir environment variables from the system environment to the msvc - environment. The OS and windir environment variables are used in the MSVC 6.0 batch file - and the SDK 6.0-7.1 SetEnv.cmd batch files. Inclusion of the OS and windir environment - variables eliminates some partial paths and warnings generated by the MSVC 6.0 and SDK - 6.0-7.1 batch files when the variables are not defined. - Note: Attempting to run the SDK 6.0-7.1 batch files directly via MSVC_USE_SCRIPT can lead to - build failures and/or incomplete build environments. The SDK 6.0-7.1 batch files - require delayed expansion to be enabled which is currently not supported and is - typically not enabled by default on the host system. The batch files may also require - environment variables that are not included by default in the msvc environment. -- An exception is raised when MSVC_UWP_APP is enabled for Visual Studio 2013 and earlier. - Previous behavior was to silently ignore MSVC_UWP_APP when enabled for Visual Studio 2013 - and earlier. Refer to the documentation for additional requirements and validation details. - MSVC_UWP_APP was extended to accept True, False, and None in addition to '1' and '0'. -- Ninja: added option "--skip-ninja-regen" to enable skipping regeneration of the ninja file - if scons can determine the ninja file doesnot need to be regenerated, which will also - skip restarting the scons daemon. Note this option is could result in incorrect rebuilds - if scons Glob or scons generated files are used in ninja build target's command lines. -- Tool loading used to have a special case for Jython, it no longer does. This effectively - means SCons doesn't work with Jython, which has in reality been the case ever since - SCons dropped Python 2 support - there is still no timeline for Jython switching to - Python 3 compatibility. + https://scons.org/pages/download.html +Here is a summary of the changes since 4.5.1: FIXES ----- -- Fix a number of Python ResourceWarnings which are issued when running SCons and/or it's tests - with python 3.9 (or higher) -- Ninja: Fix issue where Configure files weren't being properly processed when build run - via ninja. 
-- Fixed crash in C scanner's dictify_CPPDEFINES() function which happens if - AppendUnique is called on CPPPATH. (Issue #4108). -- Added default values for source and target arguments to _defines() function. This - is used to expand CPPDEFINES (and others). Previous change added those arguments - with no defaults, so old usage where _defines() was called without source and target - arguments would yield an exception. This issue was found via qt4 and qt5 tools in - scons-contrib https://github.com/SCons/scons-contrib/issues/45 -- Fix issue where if you only had mingw installed on a Windows system and no MSVC compiler, and - did not explicitly request the mingw tool, mingw tool initialization would fail and set the - default compiler to MSVC which wasn't installed, yielding broken build. - Updated mingw tool so that the generate and exists methods use the same mingw search paths - (issue #4134). -- Ninja: Added NINJA_GENERATED_SOURCE_ALIAS_NAME which allows user to specify an - Alias() which the ninja tool can use to determine which files are generated sources. - If this is not set by the user then the ninja tool will still dynamically determine - which files are generated sources based on NINJA_GENERATED_SOURCE_SUFFIXES, and create - a phony target _ninja_generated_sources. Generated sources will be built first by - ninja. This is needed because ninja cannot determine which generated sources are - required by other build targets. Code contributed by MongoDB. -- Added special case for ninja scons daemon to work in win32 python3.6 environments. - This particular environment does a bad job managing popen standard file handles, so - some special workarounds are needed. -- Added user configurable setting of ninja depfile format via NINJA_DEPFILE_PARSE_FORMAT. - Now setting NINJA_DEPFILE_PARSE_FORMAT to [msvc,gcc,clang] can force the ninja expected - format. Compiler tools will also configure the variable automatically. -- Fix issue where Express versions of the MSVC compiler were not detected due to differences - in initial msvc detection and msvc batch file determination when configuring the build - environment. This could lead to build failures when only an MSVC Express instance is installed - and the MSVC version is not explicitly specified (issue #2668 and issue #2697). -- Restore the ability of the content-timestamp decider to see that a - a source which is a symlink has changed if the file-system target of - that link has been modified (issue #3880) -- Fix typo in ninja scons daemon startup which causes ConnectionRefusedError to not retry - to connect to the server during start up. -- Fix incorrect Java classpath generation when a NodeList is used as part of any JAVA*PATH variables. -- The system environment variable names imported for MSVC 7.0 and 6.0 were updated to be - consistent with the variables names defined by their respective installers. This fixes an - error caused when bypassing MSVC detection by specifying the MSVC 7.0 batch file directly. -- lex: Fixed an issue with the lex tool where file arguments specified to either "--header-file=" - or "--tables-file=" which included a space in the path to the file would be processed incorrectly -- Modify the MSCommon logger configuration to be independent of the root logger. This fixes an issue - when multiple loggers are created and the MSCommon logger added computed fields to the root logger - that are not present in other logging instances. -- Modify the MSVC_USE_SCRIPT_ARGS test fixture to disable the msvc cache. 
This fixes an issue where - the MSVC_USE_SCRIPT_ARGS test for success relied on a debug log message that was not produced when - the msvc cache file exists and the test keys are already in the cache as the msvc script invocation - was bypassed. -- Suppress issuing a warning when there are no installed Visual Studio instances for the default - tools configuration (issue #2813). When msvc is the default compiler because there are no - compilers installed, a build may fail due to the cl.exe command not being recognized. At - present, there is no easy way to detect during msvc initialization if the default environment - will be used later to build a program and/or library. There is no error/warning issued for the - default tools as there are legitimate SCons uses that do not require a c compiler. - -IMPROVEMENTS ------------- - -- Verify that a user specified msvc script (via MSVC_USE_SCRIPT) exists and raise an - exception immediately when the user specified msvc script does not exist. -- Add cache-debug messages for push failures. -- Ninja: Added ninja mingw support and improved ninja CommandGeneratorAction support. -- Command-line help is now sensitive to the size of the terminal window: the - width of the help text will scale for terminals other than 80 chars wide. -- Refactor the msvc code so that the same data structures are used during initial msvc detection - and msvc batch file determination when configuring the build environment. Simplify the msvc - code by eliminating special case handling primarily due to the differences between the full - versions and express versions of visual studio. -- Small refactor of scons daemons using a shared StateInfo class for communication - between the scons interactive thread and the http server thread. Added error handling - for scons interactive failing to startup. -- Ninja: Updated ninja scons daemon scripts to output errors to stderr as well as the daemon log. -- Ninja: Ensure that all targets set as default via Default() in SConstruct/SConscripts are - default targets in the generated ninja.build file. +- Fix a problem (#4321) in 4.5.0/4.5.1 where ParseConfig could cause an + exception in MergeFlags when the result would be to add preprocessor + defines to existing CPPDEFINES. The following code illustrates the + circumstances that could trigger this: + env=Environment(CPPDEFINES=['a']) + env.Append(CPPDEFINES=['b']) + env.MergeFlags({'CPPDEFINES': 'c'}) PACKAGING --------- -- Added project_url for mailing lists and Discord -- Updated setup.cfg to remove Python 3.5 and add Python 3.10 +- Remove the redundant `wheel` dependency from `pyproject.toml`, + as it is added automatically by the setuptools PEP517 backend. Thanks to the following contributors listed below for their contributions to this release. ========================================================================================== .. 
code-block:: text - git shortlog --no-merges -ns 4.3.0..HEAD - git shortlog --no-merges -ns 4.3.0..HEAD - 174 Joseph Brill - 126 Mats Wichmann - 93 William Deegan - 64 Daniel Moody - 4 SergBobrovsky - 2 dependabot[bot] - 1 djh - 1 Ivan Kravets - 1 Vishwajith-K - 1 Zhichang Yu + git shortlog --no-merges -ns 4.5.1..HEAD + 3 Mats Wichmann + 2 William Deegan + 1 Michał Górny diff -Nru scons-4.4.0+dfsg/requirements-dev.txt scons-4.5.2+dfsg/requirements-dev.txt --- scons-4.4.0+dfsg/requirements-dev.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/requirements-dev.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,14 @@ +# Dependencies for development - mainly for running tests + +-r requirements.txt + +# for now keep pinning "known working" lxml, +# it's been a troublesome component in the past. +# Skip lxml for win32 as no tests which require it currently pass on win32 +lxml==4.9.2; python_version < '3.12' and sys_platform != 'win32' + +ninja + +# Needed for test/Parallel/failed-build/failed-build.py +# Also for test/ninja/shutdown_scons_daemon.py +psutil diff -Nru scons-4.4.0+dfsg/requirements-pkg.txt scons-4.5.2+dfsg/requirements-pkg.txt --- scons-4.4.0+dfsg/requirements-pkg.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/requirements-pkg.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,13 @@ +# Dependencies for packaging, and distribution, including the docs build + +-r requirements-dev.txt + +# Check if the README.rst will render o.k. on pypi and not block an upload +# Can be used with twinecheck +# See: https://github.com/pypa/readme_renderer +readme-renderer + +# sphinx pinned because it has broken several times on new releases +sphinx < 6.0 +sphinx-book-theme +rst2pdf diff -Nru scons-4.4.0+dfsg/requirements.txt scons-4.5.2+dfsg/requirements.txt --- scons-4.4.0+dfsg/requirements.txt 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/requirements.txt 2023-03-21 16:17:04.000000000 +0000 @@ -1,19 +1 @@ -# Packages needed for development, packaging, and distribution, but not for running SCons - -# This will check if the README.rst will render o.k. on pypi and not block an upload -# Can be used with twinecheck -# See: https://github.com/pypa/readme_renderer -readme-renderer -#sphinx<=5.0.0 -sphinx>=5.1.1 -sphinx_rtd_theme -rst2pdf -# for now keep pinning "known working" lxml, -# it's been a troublesome component in the past. 
-lxml==4.9.1 -rst2pdf -ninja - -# Needed for test/Parallel/failed-build/failed-build.py -# Also for test/ninja/shutdown_scons_daemon.py -psutil +# No dependencies for running SCons diff -Nru scons-4.4.0+dfsg/rpm/scons.spec.in scons-4.5.2+dfsg/rpm/scons.spec.in --- scons-4.4.0+dfsg/rpm/scons.spec.in 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/rpm/scons.spec.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -%define name scons -%define version __VERSION__ -%define release 1 -%define _unpackaged_files_terminate_build 0 - -Summary: an Open Source software construction tool -Name: %{name} -Version: %{version} -Release: %{release} -Source0: %{name}-%{version}.tar.gz -#Copyright: The SCons Foundation -License: MIT, freely distributable -Group: Development/Tools -BuildRoot: %{_tmppath}/%{name}-buildroot -Prefix: %{_prefix} -BuildArchitectures: noarch -Vendor: The SCons Development Team -Packager: The SCons Development Team -Requires: python >= 2.4 -Url: http://www.scons.org/ - -%description -SCons is an Open Source software construction tool--that is, a build -tool; an improved substitute for the classic Make utility; a better way -to build software. SCons is based on the design which won the Software -Carpentry build tool design competition in August 2000. - -SCons "configuration files" are Python scripts, eliminating the need -to learn a new build tool syntax. SCons maintains a global view of -all dependencies in a tree, and can scan source (or other) files for -implicit dependencies, such as files specified on #include lines. SCons -uses MD5 signatures to rebuild only when the contents of a file have -really changed, not just when the timestamp has been touched. SCons -supports side-by-side variant builds, and is easily extended with user- -defined Builder and/or Scanner objects. - -%prep -%setup - -%build -python setup.py build - -%install -python setup.py install --root=$RPM_BUILD_ROOT --install-lib=/usr/lib/scons --install-scripts=/usr/bin --install-data=/usr/share - -%clean -rm -rf $RPM_BUILD_ROOT - -%files -%defattr(-,root,root) -__RPM_FILES__ -%doc %{_mandir}/man1/scons.1* -%doc %{_mandir}/man1/sconsign.1* -%doc %{_mandir}/man1/scons-time.1* diff -Nru scons-4.4.0+dfsg/runtest.py scons-4.5.2+dfsg/runtest.py --- scons-4.4.0+dfsg/runtest.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/runtest.py 2023-03-21 16:17:04.000000000 +0000 @@ -22,7 +22,6 @@ import argparse import glob import os -import re import stat import subprocess import sys @@ -311,15 +310,6 @@ return buf.value -_ws = re.compile(r'\s') - -def escape(s): - if _ws.search(s): - s = '"' + s + '"' - s = s.replace('\\', '\\\\') - return s - - if not catch_output: # Without any output suppressed, we let the subprocess # write its stuff freely to stdout/stderr. @@ -432,8 +422,7 @@ A bit of a misnomer as the Popen call is now wrapped by calling subprocess.run (behind the covers uses Popen. - Very similar to SystemExecutor, but uses command_str - instead of command_args, and doesn't allow for not catching + Very similar to SystemExecutor, but doesn't allow for not catching the output). """ # For an explanation of the following 'if ... else' @@ -447,7 +436,7 @@ tmp_stderr = tempfile.TemporaryFile(mode='w+t') # Start subprocess... 
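# [Editorial note, not part of the patch: the change just below replaces
# self.command_str.split() with self.command_args, so the argument list built
# earlier is handed to subprocess.run() unchanged. Re-splitting the flattened
# string broke arguments containing spaces, which appears to be why the escape()
# helper removed earlier is no longer needed. Hypothetical example:
#   ["python", r"C:\path with spaces\test.py"] stays two arguments, while
#   r"python C:\path with spaces\test.py".split() becomes four.]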
cp = subprocess.run( - self.command_str.split(), + self.command_args, stdout=tmp_stdout, stderr=tmp_stderr, shell=False, @@ -470,7 +459,7 @@ def execute(self, env): cp = subprocess.run( - self.command_str.split(), + self.command_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, @@ -784,7 +773,7 @@ if args.runner and t.path in unittests: # For example --runner TestUnit.TAPTestRunner command_args.append('--runner ' + args.runner) - t.command_args = [escape(args.python)] + command_args + t.command_args = [args.python] + command_args t.command_str = " ".join(t.command_args) if args.printcommand: if args.print_progress: diff -Nru scons-4.4.0+dfsg/SCons/Action.py scons-4.5.2+dfsg/SCons/Action.py --- scons-4.4.0+dfsg/SCons/Action.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Action.py 2023-03-21 16:17:04.000000000 +0000 @@ -110,6 +110,7 @@ from collections import OrderedDict import SCons.Debug +import SCons.Util from SCons.Debug import logInstanceCreation import SCons.Errors import SCons.Util @@ -733,34 +734,42 @@ def get_default_ENV(env): - """ - A fiddlin' little function that has an 'import SCons.Environment' which - can't be moved to the top level without creating an import loop. Since - this import creates a local variable named 'SCons', it blocks access to - the global variable, so we move it here to prevent complaints about local - variables being used uninitialized. + """Returns an execution environment. + + If there is one in *env*, just use it, else return the Default + Environment, insantiated if necessary. + + A fiddlin' little function that has an ``import SCons.Environment`` + which cannot be moved to the top level without creating an import + loop. Since this import creates a local variable named ``SCons``, + it blocks access to the global variable, so we move it here to + prevent complaints about local variables being used uninitialized. """ global default_ENV + try: return env['ENV'] except KeyError: if not default_ENV: import SCons.Environment - # This is a hideously expensive way to get a default shell + # This is a hideously expensive way to get a default execution # environment. What it really should do is run the platform # setup to get the default ENV. Fortunately, it's incredibly - # rare for an Environment not to have a shell environment, so - # we're not going to worry about it overmuch. + # rare for an Environment not to have an execution environment, + # so we're not going to worry about it overmuch. default_ENV = SCons.Environment.Environment()['ENV'] return default_ENV def _resolve_shell_env(env, target, source): - """ - First get default environment. - Then if SHELL_ENV_GENERATORS is set and is iterable, - call each callable in that list to allow it to alter - the created execution environment. + """Returns a resolved execution environment. + + First get the execution environment. Then if ``SHELL_ENV_GENERATORS`` + is set and is iterable, call each function to allow it to alter the + created execution environment, passing each the returned execution + environment from the previous call. + + .. 
versionadded:: 4.4 """ ENV = get_default_ENV(env) shell_gen = env.get('SHELL_ENV_GENERATORS') @@ -793,28 +802,11 @@ if is_String(io) and io == 'devnull': kw[stream] = DEVNULL - # Figure out what shell environment to use + # Figure out what execution environment to use ENV = kw.get('env', None) if ENV is None: ENV = get_default_ENV(scons_env) - # Ensure that the ENV values are all strings: - new_env = {} - for key, value in ENV.items(): - if is_List(value): - # If the value is a list, then we assume it is a path list, - # because that's a pretty common list-like value to stick - # in an environment variable: - value = SCons.Util.flatten_sequence(value) - new_env[key] = os.pathsep.join(map(str, value)) - else: - # It's either a string or something else. If it's a string, - # we still want to call str() because it might be a *Unicode* - # string, which makes subprocess.Popen() gag. If it isn't a - # string or a list, then we just coerce it to a string, which - # is the proper way to handle Dir and File instances and will - # produce something reasonable for just about everything else: - new_env[key] = str(value) - kw['env'] = new_env + kw['env'] = SCons.Util.sanitize_shell_env(ENV) try: pobj = subprocess.Popen(cmd, **kw) @@ -883,11 +875,11 @@ return ' '.join(map(str, self.cmd_list)) return str(self.cmd_list) - def process(self, target, source, env, executor=None): + def process(self, target, source, env, executor=None, overrides=False): if executor: - result = env.subst_list(self.cmd_list, 0, executor=executor) + result = env.subst_list(self.cmd_list, 0, executor=executor, overrides=overrides) else: - result = env.subst_list(self.cmd_list, 0, target, source) + result = env.subst_list(self.cmd_list, 0, target, source, overrides=overrides) silent = None ignore = None while True: @@ -904,18 +896,18 @@ pass return result, ignore, silent - def strfunction(self, target, source, env, executor=None): + def strfunction(self, target, source, env, executor=None, overrides=False): if self.cmdstr is None: return None if self.cmdstr is not _null: from SCons.Subst import SUBST_RAW if executor: - c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) + c = env.subst(self.cmdstr, SUBST_RAW, executor=executor, overrides=overrides) else: - c = env.subst(self.cmdstr, SUBST_RAW, target, source) + c = env.subst(self.cmdstr, SUBST_RAW, target, source, overrides=overrides) if c: return c - cmd_list, ignore, silent = self.process(target, source, env, executor) + cmd_list, ignore, silent = self.process(target, source, env, executor, overrides=overrides) if silent: return '' return _string_from_cmd_list(cmd_list[0]) diff -Nru scons-4.4.0+dfsg/SCons/ActionTests.py scons-4.5.2+dfsg/SCons/ActionTests.py --- scons-4.4.0+dfsg/SCons/ActionTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/ActionTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -142,15 +142,15 @@ self.d[k] = v # Just use the underlying scons_subst*() utility methods. 
- def subst(self, strSubst, raw=0, target=[], source=[], conv=None): + def subst(self, strSubst, raw=0, target=[], source=[], conv=None, overrides=False): return SCons.Subst.scons_subst(strSubst, self, raw, - target, source, self.d, conv=conv) + target, source, self.d, conv=conv, overrides=overrides) subst_target_source = subst - def subst_list(self, strSubst, raw=0, target=[], source=[], conv=None): + def subst_list(self, strSubst, raw=0, target=[], source=[], conv=None, overrides=False): return SCons.Subst.scons_subst_list(strSubst, self, raw, - target, source, self.d, conv=conv) + target, source, self.d, conv=conv, overrides=overrides) def __getitem__(self, item): return self.d[item] @@ -1541,6 +1541,7 @@ (3, 9): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 10): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 11): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), + (3, 12): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), } meth_matches = [ @@ -1719,6 +1720,7 @@ (3, 9): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 10): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 11): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), + (3, 12): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), } @@ -1730,6 +1732,7 @@ (3, 9): bytearray(b'1, 1, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 10): bytearray(b'1, 1, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 11): bytearray(b'1, 1, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), + (3, 12): bytearray(b'1, 1, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), } def factory(act, **kw): @@ -1974,6 +1977,7 @@ (3, 9): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 10): bytearray(b'0, 0, 0, 0,(),(),(d\x00S\x00),(),()'), (3, 11): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), + (3, 12): bytearray(b'0, 0, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()'), } meth_matches = [ @@ -2039,6 +2043,7 @@ (3, 9): b'd\x00S\x00', (3, 10): b'd\x00S\x00', (3, 11): b'\x97\x00d\x00S\x00', + (3, 12): b'\x97\x00d\x00S\x00', } af = SCons.Action.ActionFactory(GlobalFunc, strfunc) @@ -2250,6 +2255,7 @@ bytearray(b'3, 3, 0, 0,(),(),(|\x00S\x00),(),()'), ), # 3.10.1, 3.10.2 (3, 11): bytearray(b'3, 3, 0, 0,(),(),(\x97\x00|\x00S\x00),(),()'), + (3, 12): bytearray(b'3, 3, 0, 0,(),(),(\x97\x00|\x00S\x00),(),()'), } c = SCons.Action._function_contents(func1) @@ -2288,7 +2294,11 @@ b"{TestClass:__main__}[[[(, ()), [(, (,))]]]]{{1, 1, 0, 0,(a,b),(a,b),(d\x01|\x00_\x00d\x02|\x00_\x01d\x00S\x00),(),(),2, 2, 0, 0,(),(),(d\x00S\x00),(),()}}{{{a=a,b=b}}}" ), (3, 11): bytearray( - b"{TestClass:__main__}[[[(, ()), [(, (,))]]]]{{1, 1, 0, 0,(a,b),(a,b),(\x97\x00d\x01|\x00_\x00\x00\x00\x00\x00\x00\x00\x00\x00d\x02|\x00_\x01\x00\x00\x00\x00\x00\x00\x00\x00d\x00S\x00),(),(),2, 2, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()}}{{{a=a,b=b}}}"), + b"{TestClass:__main__}[[[(, ()), [(, (,))]]]]{{1, 1, 0, 0,(a,b),(a,b),(\x97\x00d\x01|\x00_\x00\x00\x00\x00\x00\x00\x00\x00\x00d\x02|\x00_\x01\x00\x00\x00\x00\x00\x00\x00\x00d\x00S\x00),(),(),2, 2, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()}}{{{a=a,b=b}}}" + ), + (3, 12): bytearray( + b"{TestClass:__main__}[[[(, ()), [(, (,))]]]]{{1, 1, 0, 0,(a,b),(a,b),(\x97\x00d\x01|\x00_\x00\x00\x00\x00\x00\x00\x00\x00\x00d\x02|\x00_\x01\x00\x00\x00\x00\x00\x00\x00\x00d\x00S\x00),(),(),2, 2, 0, 0,(),(),(\x97\x00d\x00S\x00),(),()}}{{{a=a,b=b}}}" + ), } assert c == expected[sys.version_info[:2]], f"Got\n{c!r}\nExpected\n" + repr( @@ -2322,7 +2332,11 @@ b'0, 0, 0, 0,(Hello, World!),(print),(e\x00d\x00\x83\x01\x01\x00d\x01S\x00)' ), (3, 11): 
bytearray( - b'0, 0, 0, 0,(Hello, World!),(print),(\x97\x00\x02\x00e\x00d\x00\xa6\x01\x00\x00\xab\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00d\x01S\x00)'), + b'0, 0, 0, 0,(Hello, World!),(print),(\x97\x00\x02\x00e\x00d\x00\xa6\x01\x00\x00\xab\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00d\x01S\x00)' + ), + (3, 12): bytearray( + b'0, 0, 0, 0,(Hello, World!),(print),(\x97\x00\x02\x00e\x00d\x00\xab\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00d\x01S\x00)' + ), } assert c == expected[sys.version_info[:2]], f"Got\n{c!r}\nExpected\n" + repr( diff -Nru scons-4.4.0+dfsg/SCons/Action.xml scons-4.5.2+dfsg/SCons/Action.xml --- scons-4.4.0+dfsg/SCons/Action.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Action.xml 2023-03-21 16:17:04.000000000 +0000 @@ -26,14 +26,13 @@ -Controls whether or not SCons will +Controls whether or not &SCons; will add implicit dependencies for the commands executed to build targets. -By default, SCons will add -to each target +By default, &SCons; will add to each target an implicit dependency on the command represented by the first argument of any command line it executes (which is typically @@ -224,21 +223,39 @@ -Must be a list (or an iterable) containing functions where each function generates or -alters the environment dictionary which will be used -when executing the &cv-link-SPAWN; function. The functions will initially -be passed a reference of the current execution environment (e.g. env['ENV']), -and each called while iterating the list. Each function must return a dictionary -which will then be passed to the next function iterated. The return dictionary -should contain keys which represent the environment variables and their respective -values. - -This primary purpose of this construction variable is to give the user the ability -to substitute execution environment variables based on env, targets, and sources. -If desired, the user can completely customize the execution environment for particular -targets. +A hook allowing the execution environment to be modified prior +to the actual execution of a command line from an action +via the spawner function defined by &cv-link-SPAWN;. +Allows substitution based on targets and sources, +as well as values from the &consenv;, +adding extra environment variables, etc. + +The value must be a list (or other iterable) +of functions which each generate or +alter the execution environment dictionary. +The first function will be passed a copy of the initial execution environment +(&cv-link-ENV; in the current &consenv;); +the dictionary returned by that function is passed to the next, +until the iterable is exhausted and the result returned +for use by the command spawner. +The original execution environment is not modified. + + + +Each function provided in &cv-SHELL_ENV_GENERATORS; must accept four +arguments and return a dictionary: +env is the &consenv; for this action; +target is the list of targets associated with this action; +source is the list of sources associated with this action; +and shell_env is the current dictionary after iterating +any previous &cv-SHELL_ENV_GENERATORS; functions +(this can be compared to the original execution environment, +which is available as env['ENV'], to detect any changes). + + + Example: def custom_shell_env(env, target, source, shell_env): """customize shell_env if desired""" @@ -249,24 +266,7 @@ env["SHELL_ENV_GENERATORS"] = [custom_shell_env] - - env -The SCons construction environment from which the -execution environment can be derived from. 
- - - target -The list of targets associated with this action. - - - source -The list of sources associated with this action. - - - shell_env -The current shell_env after iterating other SHELL_ENV_GENERATORS functions. This can be compared -to the passed env['ENV'] to detect any changes. - + Available since 4.4 diff -Nru scons-4.4.0+dfsg/SCons/CacheDir.py scons-4.5.2+dfsg/SCons/CacheDir.py --- scons-4.4.0+dfsg/SCons/CacheDir.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/CacheDir.py 2023-03-21 16:17:04.000000000 +0000 @@ -211,35 +211,42 @@ (self.requests, self.hits, self.misses, self.hit_ratio)) @classmethod - def copy_from_cache(cls, env, src, dst): + def copy_from_cache(cls, env, src, dst) -> str: + """Copy a file from cache.""" if env.cache_timestamp_newer: return env.fs.copy(src, dst) else: return env.fs.copy2(src, dst) @classmethod - def copy_to_cache(cls, env, src, dst): + def copy_to_cache(cls, env, src, dst) -> str: + """Copy a file to cache. + + Just use the FS copy2 ("with metadata") method, except do an additional + check and if necessary a chmod to ensure the cachefile is writeable, + to forestall permission problems if the cache entry is later updated. + """ try: result = env.fs.copy2(src, dst) - fs = env.File(src).fs - st = fs.stat(src) - fs.chmod(dst, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + st = stat.S_IMODE(os.stat(result).st_mode) + if not st | stat.S_IWRITE: + os.chmod(dst, st | stat.S_IWRITE) return result except AttributeError as ex: raise EnvironmentError from ex @property - def hit_ratio(self): + def hit_ratio(self) -> float: return (100.0 * self.hits / self.requests if self.requests > 0 else 100) @property - def misses(self): + def misses(self) -> int: return self.requests - self.hits - def is_enabled(self): + def is_enabled(self) -> bool: return cache_enabled and self.path is not None - def is_readonly(self): + def is_readonly(self) -> bool: return cache_readonly def get_cachedir_csig(self, node): @@ -247,18 +254,21 @@ if cachefile and os.path.exists(cachefile): return SCons.Util.hash_file_signature(cachefile, SCons.Node.FS.File.hash_chunksize) - def cachepath(self, node): - """ + def cachepath(self, node) -> tuple: + """Return where to cache a file. + + Given a Node, obtain the configured cache directory and + the path to the cached file, which is generated from the + node's build signature. If caching is not enabled for the + None, return a tuple of None. 
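[Editorial note, not part of the upstream patch.] The cachepath() docstring above describes a two-level layout: the first prefix_len characters of the node's build signature, uppercased, name a subdirectory of the cache root, and the full signature names the cached file. A standalone sketch of that mapping with hypothetical values (the real method also checks is_enabled() and reads prefix_len from the CacheDir config, commonly 2)::

    import os

    def cachepath_sketch(cache_root, bsig, prefix_len=2):
        # subdirectory = first prefix_len characters of the signature, uppercased
        subdir = bsig[:prefix_len].upper()
        cachedir = os.path.join(cache_root, subdir)
        return cachedir, os.path.join(cachedir, bsig)

    # cachepath_sketch('/var/cache/scons', 'ab12ef0...') ->
    #   ('/var/cache/scons/AB', '/var/cache/scons/AB/ab12ef0...')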
""" if not self.is_enabled(): return None, None sig = node.get_cachedir_bsig() - subdir = sig[:self.config['prefix_len']].upper() - - dir = os.path.join(self.path, subdir) - return dir, os.path.join(dir, sig) + cachedir = os.path.join(self.path, subdir) + return cachedir, os.path.join(cachedir, sig) def retrieve(self, node): """ diff -Nru scons-4.4.0+dfsg/SCons/Conftest.py scons-4.5.2+dfsg/SCons/Conftest.py --- scons-4.4.0+dfsg/SCons/Conftest.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Conftest.py 2023-03-21 16:17:04.000000000 +0000 @@ -267,7 +267,7 @@ #ifdef __cplusplus extern "C" #endif -char %s();""" % function_name +char %s(void);""" % function_name lang, suffix, msg = _lang2suffix(language) if msg: @@ -285,7 +285,7 @@ int main(void) { #if defined (__stub_%(name)s) || defined (__stub___%(name)s) - fail fail fail + #error "%(name)s has a GNU stub, cannot check" #else %(name)s(); #endif @@ -627,7 +627,7 @@ def CheckLib(context, libs, func_name = None, header = None, extra_libs = None, call = None, language = None, autoadd = 1, - append = True): + append=True, unique=False): """ Configure check for a C or C++ libraries "libs". Searches through the list of libraries, until one is found where the test succeeds. @@ -713,9 +713,9 @@ if extra_libs: l.extend(extra_libs) if append: - oldLIBS = context.AppendLIBS(l) + oldLIBS = context.AppendLIBS(l, unique) else: - oldLIBS = context.PrependLIBS(l) + oldLIBS = context.PrependLIBS(l, unique) sym = "HAVE_LIB" + lib_name else: oldLIBS = -1 diff -Nru scons-4.4.0+dfsg/SCons/Defaults.py scons-4.5.2+dfsg/SCons/Defaults.py --- scons-4.4.0+dfsg/SCons/Defaults.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Defaults.py 2023-03-21 16:17:04.000000000 +0000 @@ -36,15 +36,18 @@ import stat import sys import time +from typing import List import SCons.Action import SCons.Builder import SCons.CacheDir import SCons.Environment +import SCons.Errors import SCons.PathList import SCons.Scanner.Dir import SCons.Subst import SCons.Tool +from SCons.Util import is_List, is_String, is_Sequence, is_Tuple, is_Dict, flatten # A placeholder for a default Environment (for fetching source files # from source code management systems and the like). This must be @@ -79,7 +82,6 @@ """ global _default_env if not _default_env: - import SCons.Util _default_env = SCons.Environment.Environment(*args, **kw) _default_env.Decider('content') global DefaultEnvironment @@ -157,15 +159,19 @@ ActionFactory = SCons.Action.ActionFactory -def get_paths_str(dest): - # If dest is a list, we need to manually call str() on each element - if SCons.Util.is_List(dest): - elem_strs = [] - for element in dest: - elem_strs.append('"' + str(element) + '"') - return '[' + ', '.join(elem_strs) + ']' +def get_paths_str(dest) -> str: + """Generates a string from *dest* for use in a strfunction. + + If *dest* is a list, manually converts each elem to a string. + """ + def quote(arg): + return f'"{arg}"' + + if is_List(dest): + elem_strs = [quote(d) for d in dest] + return f'[{", ".join(elem_strs)}]' else: - return '"' + str(dest) + '"' + return quote(dest) permission_dic = { @@ -187,15 +193,21 @@ } -def chmod_func(dest, mode): - import SCons.Util +def chmod_func(dest, mode) -> None: + """Implementation of the Chmod action function. + + *mode* can be either an integer (normally expressed in octal mode, + as in 0o755) or a string following the syntax of the POSIX chmod + command (for example "ugo+w"). 
The latter must be converted, since + the underlying Python only takes the numeric form. + """ from string import digits SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): + if not is_List(dest): dest = [dest] - if SCons.Util.is_String(mode) and 0 not in [i in digits for i in mode]: + if is_String(mode) and 0 not in [i in digits for i in mode]: mode = int(mode, 8) - if not SCons.Util.is_String(mode): + if not is_String(mode): for element in dest: os.chmod(str(element), mode) else: @@ -231,57 +243,87 @@ os.chmod(str(element), curr_perm & ~new_perm) -def chmod_strfunc(dest, mode): - import SCons.Util - if not SCons.Util.is_String(mode): - return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode) +def chmod_strfunc(dest, mode) -> str: + """strfunction for the Chmod action function.""" + if not is_String(mode): + return f'Chmod({get_paths_str(dest)}, {mode:#o})' else: - return 'Chmod(%s, "%s")' % (get_paths_str(dest), str(mode)) + return f'Chmod({get_paths_str(dest)}, "{mode}")' + Chmod = ActionFactory(chmod_func, chmod_strfunc) -def copy_func(dest, src, symlinks=True): - """ - If symlinks (is true), then a symbolic link will be + +def copy_func(dest, src, symlinks=True) -> int: + """Implementation of the Copy action function. + + Copies *src* to *dest*. If *src* is a list, *dest* must be + a directory, or not exist (will be created). + + Since Python :mod:`shutil` methods, which know nothing about + SCons Nodes, will be called to perform the actual copying, + args are converted to strings first. + + If *symlinks* evaluates true, then a symbolic link will be shallow copied and recreated as a symbolic link; otherwise, copying a symbolic link will be equivalent to copying the symbolic link's final target regardless of symbolic link depth. """ dest = str(dest) - src = str(src) + src = [str(n) for n in src] if is_List(src) else str(src) SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): + if is_List(src): + # this fails only if dest exists and is not a dir + try: + os.makedirs(dest, exist_ok=True) + except FileExistsError: + raise SCons.Errors.BuildError( + errstr=( + 'Error: Copy() called with a list of sources, ' + 'which requires target to be a directory, ' + f'but "{dest}" is not a directory.' + ) + ) for file in src: shutil.copy2(file, dest) return 0 + elif os.path.islink(src): if symlinks: - return os.symlink(os.readlink(src), dest) - else: - return copy_func(dest, os.path.realpath(src)) + os.symlink(os.readlink(src), dest) + return 0 + + return copy_func(dest, os.path.realpath(src)) + elif os.path.isfile(src): shutil.copy2(src, dest) return 0 + else: shutil.copytree(src, dest, symlinks) - # copytree returns None in python2 and destination string in python3 - # A error is raised in both cases, so we can just return 0 for success return 0 -Copy = ActionFactory( - copy_func, - lambda dest, src, symlinks=True: 'Copy("%s", "%s")' % (dest, src) -) +def copy_strfunc(dest, src, symlinks=True) -> str: + """strfunction for the Copy action function.""" + return f'Copy({get_paths_str(dest)}, {get_paths_str(src)})' + + +Copy = ActionFactory(copy_func, copy_strfunc) + +def delete_func(dest, must_exist=False) -> None: + """Implementation of the Delete action function. -def delete_func(dest, must_exist=0): + Lets the Python :func:`os.unlink` raise an error if *dest* does not exist, + unless *must_exist* evaluates false (the default). 
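A minimal sketch of the Copy behavior documented above, assuming hypothetical file names: a list of sources requires the target to be a directory (created if missing), while a single source is an ordinary file copy.

    # hypothetical SConstruct fragment
    env = Environment()
    # list of sources: "build/assets" is created if it does not exist
    env.Execute(Copy("build/assets", ["logo.png", "style.css"]))
    # single source: plain file copy; pass symlinks=False to copy the link's
    # final target instead of recreating the symbolic link
    env.Execute(Copy("build/libfoo.so", "libfoo.so.1.2"))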
+ """ SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): + if not is_List(dest): dest = [dest] for entry in dest: entry = str(entry) @@ -296,39 +338,42 @@ os.unlink(entry) -def delete_strfunc(dest, must_exist=0): - return 'Delete(%s)' % get_paths_str(dest) +def delete_strfunc(dest, must_exist=False) -> str: + """strfunction for the Delete action function.""" + return f'Delete({get_paths_str(dest)})' Delete = ActionFactory(delete_func, delete_strfunc) -def mkdir_func(dest): +def mkdir_func(dest) -> None: + """Implementation of the Mkdir action function.""" SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): + if not is_List(dest): dest = [dest] for entry in dest: os.makedirs(str(entry), exist_ok=True) -Mkdir = ActionFactory(mkdir_func, - lambda _dir: 'Mkdir(%s)' % get_paths_str(_dir)) +Mkdir = ActionFactory(mkdir_func, lambda _dir: f'Mkdir({get_paths_str(_dir)})') -def move_func(dest, src): +def move_func(dest, src) -> None: + """Implementation of the Move action function.""" SCons.Node.FS.invalidate_node_memos(dest) SCons.Node.FS.invalidate_node_memos(src) shutil.move(src, dest) -Move = ActionFactory(move_func, - lambda dest, src: 'Move("%s", "%s")' % (dest, src), - convert=str) +Move = ActionFactory( + move_func, lambda dest, src: f'Move("{dest}", "{src}")', convert=str +) -def touch_func(dest): +def touch_func(dest) -> None: + """Implementation of the Touch action function.""" SCons.Node.FS.invalidate_node_memos(dest) - if not SCons.Util.is_List(dest): + if not is_List(dest): dest = [dest] for file in dest: file = str(file) @@ -341,8 +386,7 @@ os.utime(file, (atime, mtime)) -Touch = ActionFactory(touch_func, - lambda file: 'Touch(%s)' % get_paths_str(file)) +Touch = ActionFactory(touch_func, lambda file: f'Touch({get_paths_str(file)})') # Internal utility functions @@ -390,7 +434,7 @@ prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW)) suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW)) - for x in SCons.Util.flatten(items_iter): + for x in flatten(items_iter): if isinstance(x, SCons.Node.FS.File): result.append(x) continue @@ -436,8 +480,8 @@ else: c = _concat_ixes - stripprefixes = list(map(env.subst, SCons.Util.flatten(stripprefixes))) - stripsuffixes = list(map(env.subst, SCons.Util.flatten(stripsuffixes))) + stripprefixes = list(map(env.subst, flatten(stripprefixes))) + stripsuffixes = list(map(env.subst, flatten(stripsuffixes))) stripped = [] for l in SCons.PathList.PathList(itms).subst_path(env, None, None): @@ -445,7 +489,7 @@ stripped.append(l) continue - if not SCons.Util.is_String(l): + if not is_String(l): l = str(l) for stripprefix in stripprefixes: @@ -467,53 +511,82 @@ return c(prefix, stripped, suffix, env) -def processDefines(defs): - """process defines, resolving strings, lists, dictionaries, into a list of - strings - """ - if SCons.Util.is_List(defs): - l = [] - for d in defs: - if d is None: +def processDefines(defs) -> List[str]: + """Return list of strings for preprocessor defines from *defs*. + + Resolves the different forms ``CPPDEFINES`` can be assembled in: + if the Append/Prepend routines are used beyond a initial setting it + will be a deque, but if written to only once (Environment initializer, + or direct write) it can be a multitude of types. + + Any prefix/suffix is handled elsewhere (usually :func:`_concat_ixes`). + + .. versionchanged:: 4.5.0 + Bare tuples are now treated the same as tuple-in-sequence, assumed + to describe a valued macro. Bare strings are now split on space. 
+ A dictionary is no longer sorted before handling. + """ + dlist = [] + if is_List(defs): + for define in defs: + if define is None: continue - elif SCons.Util.is_List(d) or isinstance(d, tuple): - if len(d) >= 2: - l.append(str(d[0]) + '=' + str(d[1])) + elif is_Sequence(define): + if len(define) > 2: + raise SCons.Errors.UserError( + f"Invalid tuple in CPPDEFINES: {define!r}, " + "must be a tuple with only two elements" + ) + name, *value = define + if value and value[0] is not None: + # TODO: do we need to quote value if it contains space? + dlist.append(f"{name}={value[0]}") else: - l.append(str(d[0])) - elif SCons.Util.is_Dict(d): - for macro, value in d.items(): + dlist.append(str(define[0])) + elif is_Dict(define): + for macro, value in define.items(): if value is not None: - l.append(str(macro) + '=' + str(value)) + # TODO: do we need to quote value if it contains space? + dlist.append(f"{macro}={value}") else: - l.append(str(macro)) - elif SCons.Util.is_String(d): - l.append(str(d)) + dlist.append(str(macro)) + elif is_String(define): + dlist.append(str(define)) else: - raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None." % repr(d)) - elif SCons.Util.is_Dict(defs): - # The items in a dictionary are stored in random order, but - # if the order of the command-line options changes from - # invocation to invocation, then the signature of the command - # line will change and we'll get random unnecessary rebuilds. - # Consequently, we have to sort the keys to ensure a - # consistent order... - l = [] - for k, v in sorted(defs.items()): - if v is None: - l.append(str(k)) + raise SCons.Errors.UserError( + f"CPPDEFINES entry {define!r} is not a tuple, list, " + "dict, string or None." + ) + elif is_Tuple(defs): + if len(defs) > 2: + raise SCons.Errors.UserError( + f"Invalid tuple in CPPDEFINES: {defs!r}, " + "must be a tuple with only two elements" + ) + name, *value = defs + if value and value[0] is not None: + # TODO: do we need to quote value if it contains space? + dlist.append(f"{name}={value[0]}") + else: + dlist.append(str(define[0])) + elif is_Dict(defs): + for macro, value in defs.items(): + if value is None: + dlist.append(str(macro)) else: - l.append(str(k) + '=' + str(v)) + dlist.append(f"{macro}={value}") + elif is_String(defs): + return defs.split() else: - l = [str(defs)] - return l + dlist.append(str(defs)) + + return dlist def _defines(prefix, defs, suffix, env, target=None, source=None, c=_concat_ixes): - """A wrapper around _concat_ixes that turns a list or string + """A wrapper around :func:`_concat_ixes` that turns a list or string into a list of C preprocessor command-line definitions. 
""" - return c(prefix, env.subst_list(processDefines(defs), target=target, source=source), suffix, env) diff -Nru scons-4.4.0+dfsg/SCons/DefaultsTests.py scons-4.5.2+dfsg/SCons/DefaultsTests.py --- scons-4.4.0+dfsg/SCons/DefaultsTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/DefaultsTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -27,7 +27,8 @@ import TestCmd -from SCons.Defaults import mkdir_func, _defines +from SCons.Defaults import mkdir_func, _defines, processDefines +from SCons.Errors import UserError class DummyEnvironment(collections.UserDict): @@ -90,18 +91,66 @@ """ env = DummyEnvironment() - # Neither source or target specified - x = _defines('-D', ['A', 'B', 'C'], 'XYZ', env) - self.assertEqual(x, ['-DAXYZ', '-DBXYZ', '-DCXYZ']) - - # only source specified - y = _defines('-D', ['AA', 'BA', 'CA'], 'XYZA', env, 'XYZ') - self.assertEqual(y, ['-DAAXYZA', '-DBAXYZA', '-DCAXYZA']) - - # source and target specified - z = _defines('-D', ['AAB', 'BAB', 'CAB'], 'XYZAB', env, 'XYZ', 'abc') - self.assertEqual(z,['-DAABXYZAB', '-DBABXYZAB', '-DCABXYZAB']) + with self.subTest(): + # Neither source or target specified + x = _defines('-D', ['A', 'B', 'C'], 'XYZ', env) + self.assertEqual(x, ['-DAXYZ', '-DBXYZ', '-DCXYZ']) + + with self.subTest(): + # only source specified + y = _defines('-D', ['AA', 'BA', 'CA'], 'XYZA', env, 'XYZ') + self.assertEqual(y, ['-DAAXYZA', '-DBAXYZA', '-DCAXYZA']) + + with self.subTest(): + # source and target specified + z = _defines('-D', ['AAB', 'BAB', 'CAB'], 'XYZAB', env, 'XYZ', 'abc') + self.assertEqual(z, ['-DAABXYZAB', '-DBABXYZAB', '-DCABXYZAB']) + def test_processDefines(self): + """Verify correct handling in processDefines.""" + env = DummyEnvironment() + + with self.subTest(): + # macro name only + rv = processDefines('name') + self.assertEqual(rv, ['name']) + + with self.subTest(): + # macro with value + rv = processDefines('name=val') + self.assertEqual(rv, ['name=val']) + + with self.subTest(): + # space-separated macros + rv = processDefines('name1 name2=val2') + self.assertEqual(rv, ['name1', 'name2=val2']) + + with self.subTest(): + # single list + rv = processDefines(['name', 'val']) + self.assertEqual(rv, ['name', 'val']) + + with self.subTest(): + # single tuple + rv = processDefines(('name', 'val')) + self.assertEqual(rv, ['name=val']) + + with self.subTest(): + # single dict + rv = processDefines({'foo': None, 'name': 'val'}) + self.assertEqual(rv, ['foo', 'name=val']) + + with self.subTest(): + # compound list + rv = processDefines(['foo', ('name', 'val'), ['name2', 'val2']]) + self.assertEqual(rv, ['foo', 'name=val', 'name2=val2']) + + with self.subTest(): + # invalid tuple + with self.assertRaises( + UserError, msg="Invalid tuple should throw SCons.Errors.UserError" + ): + rv = processDefines([('name', 'val', 'bad')]) if __name__ == "__main__": diff -Nru scons-4.4.0+dfsg/SCons/Defaults.xml scons-4.5.2+dfsg/SCons/Defaults.xml --- scons-4.4.0+dfsg/SCons/Defaults.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Defaults.xml 2023-03-21 16:17:04.000000000 +0000 @@ -92,68 +92,111 @@ A platform independent specification of C preprocessor macro definitions. 
-The definitions will be added to command lines +The definitions are added to command lines through the automatically-generated -&cv-link-_CPPDEFFLAGS; &consvar; (see above), +&cv-link-_CPPDEFFLAGS; &consvar;, which is constructed according to -the type of value of &cv-CPPDEFINES;: +the contents of &cv-CPPDEFINES;: + + If &cv-CPPDEFINES; is a string, the values of the &cv-link-CPPDEFPREFIX; and &cv-link-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to -each definition in &cv-link-CPPDEFINES;. +are respectively prepended and appended to +each definition in &cv-CPPDEFINES;, +split on whitespace. -# Will add -Dxyz to POSIX compiler command lines, +# Adds -Dxyz to POSIX compiler command lines, # and /Dxyz to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES='xyz') + + If &cv-CPPDEFINES; is a list, the values of the &cv-CPPDEFPREFIX; and &cv-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to +are respectively prepended and appended to each element in the list. -If any element is a list or tuple, -then the first item is the name being -defined and the second item is its value: +If any element is a tuple (or list) +then the first item of the tuple is the macro name +and the second is the macro definition. +If the definition is not omitted or None, +the name and definition are combined into a single +name=definition item +before the preending/appending. -# Will add -DB=2 -DA to POSIX compiler command lines, +# Adds -DB=2 -DA to POSIX compiler command lines, # and /DB=2 /DA to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES=[('B', 2), 'A']) + + If &cv-CPPDEFINES; is a dictionary, the values of the &cv-CPPDEFPREFIX; and &cv-CPPDEFSUFFIX; &consvars; -will be respectively prepended and appended to -each item from the dictionary. -The key of each dictionary item -is a name being defined -to the dictionary item's corresponding value; -if the value is -None, -then the name is defined without an explicit value. -Note that the resulting flags are sorted by keyword -to ensure that the order of the options on the -command line is consistent each time -&scons; -is run. +are respectively prepended and appended to +each key from the dictionary. +If the value for a key is not None, +then the key (macro name) and the value +(macros definition) are combined into a single +name=definition item +before the prepending/appending. -# Will add -DA -DB=2 to POSIX compiler command lines, -# and /DA /DB=2 to Microsoft Visual C++ command lines. +# Adds -DA -DB=2 to POSIX compiler command lines, +# or /DA /DB=2 to Microsoft Visual C++ command lines. env = Environment(CPPDEFINES={'B':2, 'A':None}) + + + + +Depending on how contents are added to &cv-CPPDEFINES;, +it may be transformed into a compound type, +for example a list containing strings, tuples and/or dictionaries. +&SCons; can correctly expand such a compound type. + + + +Note that &SCons; may call the compiler via a shell. +If a macro definition contains characters such as spaces that +have meaning to the shell, or is intended to be a string value, +you may need to use the shell's quoting syntax to avoid +interpretation by the shell before the preprocessor sees it. +Function-like macros are not supported via this mechanism +(and some compilers do not even implement that functionality +via the command lines). +When quoting, note that +one set of quote characters are used to define a &Python; string, +then quotes embedded inside that would be consumed by the shell +unless escaped. 
These examples may help illustrate: + + + +env = Environment(CPPDEFINES=['USE_ALT_HEADER=\\"foo_alt.h\\"']) +env = Environment(CPPDEFINES=[('USE_ALT_HEADER', '\\"foo_alt.h\\"')]) + + + +:Changed in version 4.5: +&SCons; no longer sorts &cv-CPPDEFINES; values entered +in dictionary form. &Python; now preserves dictionary +keys in the order they are entered, so it is no longer +necessary to sort them to ensure a stable command line. + + diff -Nru scons-4.4.0+dfsg/SCons/Environment.py scons-4.5.2+dfsg/SCons/Environment.py --- scons-4.4.0+dfsg/SCons/Environment.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Environment.py 2023-03-21 16:17:04.000000000 +0000 @@ -35,7 +35,7 @@ import sys import re import shlex -from collections import UserDict +from collections import UserDict, deque import SCons.Action import SCons.Builder @@ -65,6 +65,7 @@ flatten, is_Dict, is_List, + is_Scalar, is_Sequence, is_String, is_Tuple, @@ -193,6 +194,206 @@ return result +def _add_cppdefines( + env_dict: dict, + val, # add annotation? + prepend: bool = False, + unique: bool = False, + delete_existing: bool = False, +) -> None: + """Adds to ``CPPDEFINES``, using the rules for C preprocessor macros. + + This is split out from regular construction variable addition because + these entries can express either a macro with a replacement value or + one without. A macro with replacement value can be supplied as *val* + in three ways: as a combined string ``"name=value"``; as a tuple + ``(name, value)``, or as an entry in a dictionary ``{"name": value}``. + A list argument with multiple macros can also be given. + + Additions can be unconditional (duplicates allowed) or uniquing (no dupes). + + Note if a replacement value is supplied, *unique* requires a full + match to decide uniqueness - both the macro name and the replacement. + The inner :func:`_is_in` is used to figure that out. + + Args: + env_dict: the dictionary containing the ``CPPDEFINES`` to be modified. + val: the value to add, can be string, sequence or dict + prepend: whether to put *val* in front or back. + unique: whether to add *val* if it already exists. + delete_existing: if *unique* is true, add *val* after removing previous. + + .. versionadded:: 4.5.0 + """ + + def _add_define(item, defines: deque, prepend: bool = False) -> None: + """Convenience function to prepend/append a single value. + + Sole purpose is to shorten code in the outer function. + """ + if prepend: + defines.appendleft(item) + else: + defines.append(item) + + + def _is_in(item, defines: deque): + """Returns match for *item* if found in *defines*. + + Accounts for type differences: tuple ("FOO", "BAR"), list + ["FOO", "BAR"], string "FOO=BAR" and dict {"FOO": "BAR"} all + differ as far as Python equality comparison is concerned, but + are the same for purposes of creating the preprocessor macro. + Also an unvalued string should match something like ``("FOO", None)``. + Since the caller may wish to remove a matched entry, we need to + return it - cannot remove *item* itself unless it happened to + be an exact (type) match. + + Called from a place we know *defines* is always a deque, and + *item* will not be a dict, so don't need to do much type checking. + If this ends up used more generally, would need to adjust that. + + Note implied assumption that members of a list-valued define + will not be dicts - we cannot actually guarantee this, since + if the initial add is a list its contents are not converted. 
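A minimal standalone sketch of the normalization that _is_in relies on (the function name here is an assumption; the real helper is the nested _macro_conv shown below): the string and tuple spellings of the same macro reduce to the same name/value pair before comparison.

    def normalize_macro(entry):
        """Illustrative only: reduce a CPPDEFINES entry to [name, value]."""
        if isinstance(entry, tuple):
            return list(entry)
        if isinstance(entry, str):
            name, sep, value = entry.partition("=")
            return [name, value if sep else None]
        return entry

    assert normalize_macro("FOO=BAR") == normalize_macro(("FOO", "BAR"))
    assert normalize_macro("FOO") == normalize_macro(("FOO", None))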
+ """ + def _macro_conv(v) -> list: + """Normalizes a macro to a list for comparisons.""" + if is_Tuple(v): + return list(v) + elif is_String(v): + rv = v.split("=") + if len(rv) == 1: + return [v, None] + return rv + return v + + if item in defines: # cheap check first + return item + + item = _macro_conv(item) + for define in defines: + if item == _macro_conv(define): + return define + + return False + + key = 'CPPDEFINES' + try: + defines = env_dict[key] + except KeyError: + # This is a new entry, just save it as is. Defer conversion to + # preferred type until someone tries to amend the value. + # processDefines has no problem with unconverted values if it + # gets called without any later additions. + if is_String(val): + env_dict[key] = val.split() + else: + env_dict[key] = val + return + + # Convert type of existing to deque (if necessary) to simplify processing + # of additions - inserting at either end is cheap. Deferred conversion + # is also useful in case CPPDEFINES was set initially without calling + # through here (e.g. Environment kwarg, or direct assignment). + if isinstance(defines, deque): + # Already a deque? do nothing. Explicit check is so we don't get + # picked up by the is_list case below. + pass + elif is_String(defines): + env_dict[key] = deque(defines.split()) + elif is_Tuple(defines): + if len(defines) > 2: + raise SCons.Errors.UserError( + f"Invalid tuple in CPPDEFINES: {define!r}, must be a two-tuple" + ) + env_dict[key] = deque([defines]) + elif is_List(defines): + # a little extra work in case the initial container has dict + # item(s) inside it, so those can be matched by _is_in(). + result = deque() + for define in defines: + if is_Dict(define): + result.extend(define.items()) + else: + result.append(define) + env_dict[key] = result + elif is_Dict(defines): + env_dict[key] = deque(defines.items()) + else: + env_dict[key] = deque(defines) + defines = env_dict[key] # in case we reassigned due to conversion + + # now actually do the addition. 
+ if is_Dict(val): + # Unpack the dict while applying to existing + for item in val.items(): + if unique: + match = _is_in(item, defines) + if match and delete_existing: + defines.remove(match) + _add_define(item, defines, prepend) + elif not match: + _add_define(item, defines, prepend) + else: + _add_define(item, defines, prepend) + + elif is_String(val): + for v in val.split(): + if unique: + match = _is_in(v, defines) + if match and delete_existing: + defines.remove(match) + _add_define(v, defines, prepend) + elif not match: + _add_define(v, defines, prepend) + else: + _add_define(v, defines, prepend) + + # A tuple appended to anything should yield -Dkey=value + elif is_Tuple(val): + if len(val) > 2: + raise SCons.Errors.UserError( + f"Invalid tuple added to CPPDEFINES: {val!r}, " + "must be a two-tuple" + ) + if len(val) == 1: + val = (val[0], None) # normalize + if not is_Scalar(val[0]) or not is_Scalar(val[1]): + raise SCons.Errors.UserError( + f"Invalid tuple added to CPPDEFINES: {val!r}, " + "values must be scalar" + ) + if unique: + match = _is_in(val, defines) + if match and delete_existing: + defines.remove(match) + _add_define(val, defines, prepend) + elif not match: + _add_define(val, defines, prepend) + else: + _add_define(val, defines, prepend) + + elif is_List(val): + tmp = [] + for item in val: + if unique: + match = _is_in(item, defines) + if match and delete_existing: + defines.remove(match) + tmp.append(item) + elif not match: + tmp.append(item) + else: + tmp.append(item) + + if prepend: + defines.extendleft(tmp) + else: + defines.extend(tmp) + + # else: # are there any other cases? processDefines doesn't think so. + # The following is partly based on code in a comment added by Peter # Shannon at the following page (there called the "transplant" class): @@ -481,7 +682,7 @@ def lvars(self): return {} - def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): + def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None, overrides=False): """Recursively interpolates construction variables from the Environment into the specified string, returning the expanded result. Construction variables are specified by a $ prefix @@ -496,7 +697,7 @@ lvars['__env__'] = self if executor: lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv) + return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides) def subst_kw(self, kw, raw=0, target=None, source=None): nkw = {} @@ -507,7 +708,7 @@ nkw[k] = v return nkw - def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None): + def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None, overrides=False): """Calls through to SCons.Subst.scons_subst_list(). 
See the documentation for that function.""" gvars = self.gvars() @@ -515,7 +716,7 @@ lvars['__env__'] = self if executor: lvars.update(executor.get_lvars()) - return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv) + return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides) def subst_path(self, path, target=None, source=None): """Substitute a path list, turning EntryProxies into Nodes @@ -802,7 +1003,7 @@ '-openmp', '-fmerge-all-constants', '-fopenmp', - ): + ) or arg.startswith('-fsanitize'): mapping['CCFLAGS'].append(arg) mapping['LINKFLAGS'].append(arg) elif arg == '-mwindows': @@ -837,11 +1038,17 @@ def MergeFlags(self, args, unique=True) -> None: """Merge flags into construction variables. - Merges the flags from ``args`` into this construction environent. - If ``args`` is not a dict, it is first converted to one with + Merges the flags from *args* into this construction environent. + If *args* is not a dict, it is first converted to one with flags distributed into appropriate construction variables. See :meth:`ParseFlags`. + As a side effect, if *unique* is true, a new object is created + for each modified construction variable by the loop at the end. + This is silently expected by the :meth:`Override` *parse_flags* + functionality, which does not want to share the list (or whatever) + with the environment being overridden. + Args: args: flags to merge unique: merge flags rather than appending (default: True). @@ -876,6 +1083,16 @@ try: orig = orig + value except (KeyError, TypeError): + # If CPPDEFINES is a deque, adding value (a list) + # results in TypeError, so we handle that case here. + # Just in case we got called from Override, make + # sure we make a copy, because we don't go through + # the cleanup loops at the end of the outer for loop, + # which implicitly gives us a new object. + if isinstance(orig, deque): + self[key] = self[key].copy() + self.AppendUnique(CPPDEFINES=value, delete_existing=True) + continue try: add_to_orig = orig.append except AttributeError: @@ -894,6 +1111,7 @@ for v in orig[::-1]: if v not in t: t.insert(0, v) + self[key] = t @@ -1215,16 +1433,15 @@ kw = copy_non_reserved_keywords(kw) for key, val in kw.items(): + if key == 'CPPDEFINES': + _add_cppdefines(self._dict, val) + continue + try: - if key == 'CPPDEFINES' and is_String(self._dict[key]): - self._dict[key] = [self._dict[key]] orig = self._dict[key] except KeyError: # No existing var in the environment, so set to the new value. - if key == 'CPPDEFINES' and is_String(val): - self._dict[key] = [val] - else: - self._dict[key] = val + self._dict[key] = val continue try: @@ -1263,19 +1480,8 @@ # things like UserList will incorrectly coerce the # original dict to a list (which we don't want). 
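Since this hunk also teaches ParseFlags to route -fsanitize* options to both compiler and linker flags, a short hedged example of the resulting distribution (the commented values follow the unit tests elsewhere in this patch):

    # hypothetical SConstruct fragment
    env = Environment()
    flags = env.ParseFlags("-fsanitize=address -DBAR=value")
    # flags['CCFLAGS']    -> ['-fsanitize=address']
    # flags['LINKFLAGS']  -> ['-fsanitize=address']
    # flags['CPPDEFINES'] -> [['BAR', 'value']]
    env.MergeFlags(flags)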
if is_List(val): - if key == 'CPPDEFINES': - tmp = [] - for (k, v) in orig.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - orig = tmp - orig += val - self._dict[key] = orig - else: - for v in val: - orig[v] = None + for v in val: + orig[v] = None else: try: update_dict(val) @@ -1330,6 +1536,9 @@ """ kw = copy_non_reserved_keywords(kw) for key, val in kw.items(): + if key == 'CPPDEFINES': + _add_cppdefines(self._dict, val, unique=True, delete_existing=delete_existing) + continue if is_List(val): val = _delete_duplicates(val, delete_existing) if key not in self._dict or self._dict[key] in ('', None): @@ -1338,46 +1547,8 @@ self._dict[key].update(val) elif is_List(val): dk = self._dict[key] - if key == 'CPPDEFINES': - tmp = [] - for i in val: - if is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - val = tmp - # Construct a list of (key, value) tuples. - if is_Dict(dk): - tmp = [] - for (k, v) in dk.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - dk = tmp - elif is_String(dk): - dk = [(dk,)] - else: - tmp = [] - for i in dk: - if is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - dk = tmp - else: - if not is_List(dk): - dk = [dk] + if not is_List(dk): + dk = [dk] if delete_existing: dk = [x for x in dk if x not in val] else: @@ -1386,70 +1557,15 @@ else: dk = self._dict[key] if is_List(dk): - if key == 'CPPDEFINES': - tmp = [] - for i in dk: - if is_List(i): - if len(i) >= 2: - tmp.append((i[0], i[1])) - else: - tmp.append((i[0],)) - elif is_Tuple(i): - tmp.append(i) - else: - tmp.append((i,)) - dk = tmp - # Construct a list of (key, value) tuples. - if is_Dict(val): - tmp = [] - for (k, v) in val.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - val = tmp - elif is_String(val): - val = [(val,)] - if delete_existing: - dk = list(filter(lambda x, val=val: x not in val, dk)) - self._dict[key] = dk + val - else: - dk = [x for x in dk if x not in val] - self._dict[key] = dk + val + # By elimination, val is not a list. Since dk is a + # list, wrap val in a list first. + if delete_existing: + dk = list(filter(lambda x, val=val: x not in val, dk)) + self._dict[key] = dk + [val] else: - # By elimination, val is not a list. Since dk is a - # list, wrap val in a list first. 
- if delete_existing: - dk = list(filter(lambda x, val=val: x not in val, dk)) + if val not in dk: self._dict[key] = dk + [val] - else: - if val not in dk: - self._dict[key] = dk + [val] else: - if key == 'CPPDEFINES': - if is_String(dk): - dk = [dk] - elif is_Dict(dk): - tmp = [] - for (k, v) in dk.items(): - if v is not None: - tmp.append((k, v)) - else: - tmp.append((k,)) - dk = tmp - if is_String(val): - if val in dk: - val = [] - else: - val = [val] - elif is_Dict(val): - tmp = [] - for i,j in val.items(): - if j is not None: - tmp.append((i,j)) - else: - tmp.append(i) - val = tmp if delete_existing: dk = [x for x in dk if x not in val] self._dict[key] = dk + val @@ -1726,6 +1842,9 @@ kw = copy_non_reserved_keywords(kw) for key, val in kw.items(): + if key == 'CPPDEFINES': + _add_cppdefines(self._dict, val, prepend=True) + continue try: orig = self._dict[key] except KeyError: @@ -1815,6 +1934,9 @@ """ kw = copy_non_reserved_keywords(kw) for key, val in kw.items(): + if key == 'CPPDEFINES': + _add_cppdefines(self._dict, val, unique=True, prepend=True, delete_existing=delete_existing) + continue if is_List(val): val = _delete_duplicates(val, not delete_existing) if key not in self._dict or self._dict[key] in ('', None): @@ -1897,6 +2019,11 @@ return self.fs.Dir(self.subst(tp)).srcnode().get_abspath() def Tool(self, tool, toolpath=None, **kwargs) -> SCons.Tool.Tool: + """Find and run tool module *tool*. + + .. versionchanged:: 4.2 + returns the tool module rather than ``None``. + """ if is_String(tool): tool = self.subst(tool) if toolpath is None: @@ -2335,7 +2462,10 @@ return [self.subst(arg)] def Value(self, value, built_value=None, name=None): - """ + """Return a Value (Python expression) node. + + .. versionchanged:: 4.0 + the *name* parameter was added. """ return SCons.Node.Python.ValueWithMemo(value, built_value, name) @@ -2344,9 +2474,8 @@ src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] self.fs.VariantDir(variant_dir, src_dir, duplicate) - def FindSourceFiles(self, node='.'): - """ returns a list of all source files. 
- """ + def FindSourceFiles(self, node='.') -> list: + """Return a list of all source files.""" node = self.arg2nodes(node, self.fs.Entry)[0] sources = [] diff -Nru scons-4.4.0+dfsg/SCons/EnvironmentTests.py scons-4.5.2+dfsg/SCons/EnvironmentTests.py --- scons-4.4.0+dfsg/SCons/EnvironmentTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/EnvironmentTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -28,7 +28,7 @@ import os import sys import unittest -from collections import UserDict as UD, UserList as UL +from collections import UserDict as UD, UserList as UL, deque import TestCmd @@ -820,6 +820,8 @@ "--param l1-cache-size=32 --param l2-cache-size=6144 " "+DD64 " "-DFOO -DBAR=value -D BAZ " + "-fsanitize=memory " + "-fsanitize-address-use-after-return " ) d = env.ParseFlags(s) @@ -836,7 +838,9 @@ ('-imacros', env.fs.File('/usr/include/foo4')), ('-include', env.fs.File('/usr/include/foo5')), ('--param', 'l1-cache-size=32'), ('--param', 'l2-cache-size=6144'), - '+DD64'], repr(d['CCFLAGS']) + '+DD64', + '-fsanitize=memory', + '-fsanitize-address-use-after-return'], repr(d['CCFLAGS']) assert d['CXXFLAGS'] == ['-std=c++0x'], repr(d['CXXFLAGS']) assert d['CPPDEFINES'] == ['FOO', ['BAR', 'value'], 'BAZ'], d['CPPDEFINES'] assert d['CPPFLAGS'] == ['-Wp,-cpp'], d['CPPFLAGS'] @@ -856,7 +860,9 @@ '-mno-cygwin', '-mwindows', ('-arch', 'i386'), ('-isysroot', '/tmp'), - '+DD64'], repr(d['LINKFLAGS']) + '+DD64', + '-fsanitize=memory', + '-fsanitize-address-use-after-return'], repr(d['LINKFLAGS']) assert d['RPATH'] == ['rpath1', 'rpath2', 'rpath3'], d['RPATH'] @@ -896,6 +902,11 @@ assert env['A'] == ['aaa'], env['A'] assert env['B'] == ['b', 'bb', 'bbb'], env['B'] + # issue #4231: CPPDEFINES can be a deque, tripped up merge logic + env = Environment(CPPDEFINES=deque(['aaa', 'bbb'])) + env.MergeFlags({'CPPDEFINES': 'ccc'}) + self.assertEqual(env['CPPDEFINES'], deque(['aaa', 'bbb', 'ccc'])) + # issue #3665: if merging dict which is a compound object # (i.e. value can be lists, etc.), the value object should not # be modified. per the issue, this happened if key not in env. @@ -1815,147 +1826,189 @@ updates and check that the original remains intact and the copy has the updated values. 
""" - env1 = self.TestEnvironment(XXX='x', YYY='y') - env2 = env1.Clone() - env1copy = env1.Clone() - assert env1copy == env1 - assert env2 == env1 - env2.Replace(YYY = 'yyy') - assert env1 != env2 - assert env1 == env1copy - - env3 = env1.Clone(XXX='x3', ZZZ='z3') - assert env3 != env1 - assert env3.Dictionary('XXX') == 'x3' - assert env1.Dictionary('XXX') == 'x' - assert env3.Dictionary('YYY') == 'y' - assert env3.Dictionary('ZZZ') == 'z3' - assert env1 == env1copy + with self.subTest(): + env1 = self.TestEnvironment(XXX='x', YYY='y') + env2 = env1.Clone() + env1copy = env1.Clone() + self.assertEqual(env1copy, env1) + self.assertEqual(env2, env1) + env2.Replace(YYY = 'yyy') + self.assertNotEqual(env1, env2) + self.assertEqual(env1, env1copy) + + env3 = env1.Clone(XXX='x3', ZZZ='z3') + self.assertNotEqual(env3, env1) + self.assertEqual(env3.Dictionary('XXX'), 'x3') + self.assertEqual(env1.Dictionary('XXX'), 'x') + self.assertEqual(env3.Dictionary('YYY'), 'y') + self.assertEqual(env3.Dictionary('ZZZ'), 'z3') + self.assertRaises(KeyError, env1.Dictionary, 'ZZZ') # leak test + self.assertEqual(env1, env1copy) # Ensure that lists and dictionaries are deep copied, but not instances - class TestA: - pass + with self.subTest(): + + class TestA: + pass - env1 = self.TestEnvironment(XXX=TestA(), YYY=[1, 2, 3], ZZZ={1: 2, 3: 4}) - env2 = env1.Clone() - env2.Dictionary('YYY').append(4) - env2.Dictionary('ZZZ')[5] = 6 - assert env1.Dictionary('XXX') is env2.Dictionary('XXX') - assert 4 in env2.Dictionary('YYY') - assert 4 not in env1.Dictionary('YYY') - assert 5 in env2.Dictionary('ZZZ') - assert 5 not in env1.Dictionary('ZZZ') - - # - env1 = self.TestEnvironment(BUILDERS={'b1': Builder()}) - assert hasattr(env1, 'b1'), "env1.b1 was not set" - assert env1.b1.object == env1, "b1.object doesn't point to env1" - env2 = env1.Clone(BUILDERS = {'b2' : Builder()}) - assert env2 != env1 - assert hasattr(env1, 'b1'), "b1 was mistakenly cleared from env1" - assert env1.b1.object == env1, "b1.object was changed" - assert not hasattr(env2, 'b1'), "b1 was not cleared from env2" - assert hasattr(env2, 'b2'), "env2.b2 was not set" - assert env2.b2.object == env2, "b2.object doesn't point to env2" + env1 = self.TestEnvironment( + XXX=TestA(), + YYY=[1, 2, 3], + ZZZ={1: 2, 3: 4} + ) + env2 = env1.Clone() + env2.Dictionary('YYY').append(4) + env2.Dictionary('ZZZ')[5] = 6 + self.assertIs(env1.Dictionary('XXX'), env2.Dictionary('XXX')) + self.assertIn(4, env2.Dictionary('YYY')) + self.assertNotIn(4, env1.Dictionary('YYY')) + self.assertIn(5, env2.Dictionary('ZZZ')) + self.assertNotIn(5, env1.Dictionary('ZZZ')) + + # We also need to look at the special cases in semi_deepcopy() + # used when cloning - these should not leak to the original either + with self.subTest(): + env1 = self.TestEnvironment( + XXX=deque([1, 2, 3]), + YYY=UL([1, 2, 3]), + ZZZ=UD({1: 2, 3: 4}), + ) + env2 = env1.Clone() + env2['XXX'].append(4) + env2['YYY'].append(4) + env2['ZZZ'][5] = 6 + self.assertIn(4, env2['XXX']) + self.assertNotIn(4, env1['XXX']) + self.assertIn(4, env2['YYY']) + self.assertNotIn(4, env1['YYY']) + self.assertIn(5, env2['ZZZ']) + self.assertNotIn(5, env1['ZZZ']) + + # BUILDERS is special... 
+ with self.subTest(): + env1 = self.TestEnvironment(BUILDERS={'b1': Builder()}) + assert hasattr(env1, 'b1'), "env1.b1 was not set" + assert env1.b1.object == env1, "b1.object doesn't point to env1" + env2 = env1.Clone(BUILDERS = {'b2' : Builder()}) + assert env2 != env1 + assert hasattr(env1, 'b1'), "b1 was mistakenly cleared from env1" + assert env1.b1.object == env1, "b1.object was changed" + assert not hasattr(env2, 'b1'), "b1 was not cleared from env2" + assert hasattr(env2, 'b2'), "env2.b2 was not set" + assert env2.b2.object == env2, "b2.object doesn't point to env2" # Ensure that specifying new tools in a copied environment works. - def foo(env): env['FOO'] = 1 - def bar(env): env['BAR'] = 2 - def baz(env): env['BAZ'] = 3 - env1 = self.TestEnvironment(tools=[foo]) - env2 = env1.Clone() - env3 = env1.Clone(tools=[bar, baz]) - - assert env1.get('FOO') == 1 - assert env1.get('BAR') is None - assert env1.get('BAZ') is None - assert env2.get('FOO') == 1 - assert env2.get('BAR') is None - assert env2.get('BAZ') is None - assert env3.get('FOO') == 1 - assert env3.get('BAR') == 2 - assert env3.get('BAZ') == 3 + with self.subTest(): + + def foo(env): + env['FOO'] = 1 + + def bar(env): + env['BAR'] = 2 + + def baz(env): + env['BAZ'] = 3 + + env1 = self.TestEnvironment(tools=[foo]) + env2 = env1.Clone() + env3 = env1.Clone(tools=[bar, baz]) + + assert env1.get('FOO') == 1 + assert env1.get('BAR') is None + assert env1.get('BAZ') is None + assert env2.get('FOO') == 1 + assert env2.get('BAR') is None + assert env2.get('BAZ') is None + assert env3.get('FOO') == 1 + assert env3.get('BAR') == 2 + assert env3.get('BAZ') == 3 # Ensure that recursive variable substitution when copying # environments works properly. - env1 = self.TestEnvironment(CCFLAGS = '-DFOO', XYZ = '-DXYZ') - env2 = env1.Clone(CCFLAGS = '$CCFLAGS -DBAR', - XYZ = ['-DABC', 'x $XYZ y', '-DDEF']) - x = env2.get('CCFLAGS') - assert x == '-DFOO -DBAR', x - x = env2.get('XYZ') - assert x == ['-DABC', 'x -DXYZ y', '-DDEF'], x + with self.subTest(): + env1 = self.TestEnvironment(CCFLAGS='-DFOO', XYZ='-DXYZ') + env2 = env1.Clone( + CCFLAGS='$CCFLAGS -DBAR', XYZ=['-DABC', 'x $XYZ y', '-DDEF'] + ) + x = env2.get('CCFLAGS') + assert x == '-DFOO -DBAR', x + x = env2.get('XYZ') + assert x == ['-DABC', 'x -DXYZ y', '-DDEF'], x # Ensure that special properties of a class don't get # lost on copying. - env1 = self.TestEnvironment(FLAGS = CLVar('flag1 flag2')) - x = env1.get('FLAGS') - assert x == ['flag1', 'flag2'], x - env2 = env1.Clone() - env2.Append(FLAGS = 'flag3 flag4') - x = env2.get('FLAGS') - assert x == ['flag1', 'flag2', 'flag3', 'flag4'], x - x = env1.get('FLAGS') - assert x == ['flag1', 'flag2'], x + with self.subTest(): + env1 = self.TestEnvironment(FLAGS=CLVar('flag1 flag2')) + x = env1.get('FLAGS') + assert x == ['flag1', 'flag2'], x + env2 = env1.Clone() + env2.Append(FLAGS='flag3 flag4') + x = env2.get('FLAGS') + assert x == ['flag1', 'flag2', 'flag3', 'flag4'], x + x = env1.get('FLAGS') + assert x == ['flag1', 'flag2'], x # Ensure that appending directly to a copied CLVar # doesn't modify the original. 
- env1 = self.TestEnvironment(FLAGS = CLVar('flag1 flag2')) - x = env1.get('FLAGS') - assert x == ['flag1', 'flag2'], x - env2 = env1.Clone() - env2['FLAGS'] += ['flag3', 'flag4'] - x = env2.get('FLAGS') - assert x == ['flag1', 'flag2', 'flag3', 'flag4'], x - x = env1.get('FLAGS') - assert x == ['flag1', 'flag2'], x + with self.subTest(): + env1 = self.TestEnvironment(FLAGS=CLVar('flag1 flag2')) + x = env1.get('FLAGS') + assert x == ['flag1', 'flag2'], x + env2 = env1.Clone() + env2['FLAGS'] += ['flag3', 'flag4'] + x = env2.get('FLAGS') + assert x == ['flag1', 'flag2', 'flag3', 'flag4'], x + x = env1.get('FLAGS') + assert x == ['flag1', 'flag2'], x # Test that the environment stores the toolpath and # re-uses it for copies. - test = TestCmd.TestCmd(workdir = '') + with self.subTest(): + test = TestCmd.TestCmd(workdir='') - test.write('xxx.py', """\ + test.write('xxx.py', """\ def exists(env): return True def generate(env): env['XXX'] = 'one' """) - test.write('yyy.py', """\ + test.write('yyy.py', """\ def exists(env): return True def generate(env): env['YYY'] = 'two' """) - env = self.TestEnvironment(tools=['xxx'], toolpath=[test.workpath('')]) - assert env['XXX'] == 'one', env['XXX'] - env = env.Clone(tools=['yyy']) - assert env['YYY'] == 'two', env['YYY'] - + env = self.TestEnvironment(tools=['xxx'], toolpath=[test.workpath('')]) + assert env['XXX'] == 'one', env['XXX'] + env = env.Clone(tools=['yyy']) + assert env['YYY'] == 'two', env['YYY'] # Test that - real_value = [4] - - def my_tool(env, rv=real_value): - assert env['KEY_THAT_I_WANT'] == rv[0] - env['KEY_THAT_I_WANT'] = rv[0] + 1 - - env = self.TestEnvironment() + with self.subTest(): + real_value = [4] - real_value[0] = 5 - env = env.Clone(KEY_THAT_I_WANT=5, tools=[my_tool]) - assert env['KEY_THAT_I_WANT'] == real_value[0], env['KEY_THAT_I_WANT'] - - real_value[0] = 6 - env = env.Clone(KEY_THAT_I_WANT=6, tools=[my_tool]) - assert env['KEY_THAT_I_WANT'] == real_value[0], env['KEY_THAT_I_WANT'] + def my_tool(env, rv=real_value): + assert env['KEY_THAT_I_WANT'] == rv[0] + env['KEY_THAT_I_WANT'] = rv[0] + 1 + + env = self.TestEnvironment() + + real_value[0] = 5 + env = env.Clone(KEY_THAT_I_WANT=5, tools=[my_tool]) + assert env['KEY_THAT_I_WANT'] == real_value[0], env['KEY_THAT_I_WANT'] + + real_value[0] = 6 + env = env.Clone(KEY_THAT_I_WANT=6, tools=[my_tool]) + assert env['KEY_THAT_I_WANT'] == real_value[0], env['KEY_THAT_I_WANT'] # test for pull request #150 - env = self.TestEnvironment() - env._dict.pop('BUILDERS') - assert ('BUILDERS' in env) is False - env2 = env.Clone() + with self.subTest(): + env = self.TestEnvironment() + env._dict.pop('BUILDERS') + assert ('BUILDERS' in env) is False + env2 = env.Clone() def test_Detect(self): """Test Detect()ing tools""" @@ -2119,7 +2172,7 @@ ('-isystem', '/usr/include/foo2'), ('-idirafter', '/usr/include/foo3'), '+DD64'], env['CCFLAGS'] - assert env['CPPDEFINES'] == ['FOO', ['BAR', 'value']], env['CPPDEFINES'] + self.assertEqual(list(env['CPPDEFINES']), ['FOO', ['BAR', 'value']]) assert env['CPPFLAGS'] == ['', '-Wp,-cpp'], env['CPPFLAGS'] assert env['CPPPATH'] == ['string', '/usr/include/fum', 'bar'], env['CPPPATH'] assert env['FRAMEWORKPATH'] == ['fwd1', 'fwd2', 'fwd3'], env['FRAMEWORKPATH'] @@ -3614,10 +3667,10 @@ env = Environment(tools=[], CCFLAGS=None, parse_flags = '-Y') assert env['CCFLAGS'] == ['-Y'], env['CCFLAGS'] - env = Environment(tools=[], CPPDEFINES = 'FOO', parse_flags = '-std=c99 -X -DBAR') + env = Environment(tools=[], CPPDEFINES='FOO', parse_flags='-std=c99 -X -DBAR') 
assert env['CFLAGS'] == ['-std=c99'], env['CFLAGS'] assert env['CCFLAGS'] == ['-X'], env['CCFLAGS'] - assert env['CPPDEFINES'] == ['FOO', 'BAR'], env['CPPDEFINES'] + self.assertEqual(list(env['CPPDEFINES']), ['FOO', 'BAR']) def test_clone_parse_flags(self): """Test the env.Clone() parse_flags argument""" @@ -3639,8 +3692,7 @@ assert 'CCFLAGS' not in env assert env2['CCFLAGS'] == ['-X'], env2['CCFLAGS'] assert env['CPPDEFINES'] == 'FOO', env['CPPDEFINES'] - assert env2['CPPDEFINES'] == ['FOO','BAR'], env2['CPPDEFINES'] - + self.assertEqual(list(env2['CPPDEFINES']), ['FOO','BAR']) class OverrideEnvironmentTestCase(unittest.TestCase,TestEnvironmentFixture): @@ -3930,15 +3982,16 @@ assert env['CCFLAGS'] is None, env['CCFLAGS'] assert env2['CCFLAGS'] == ['-Y'], env2['CCFLAGS'] - env = SubstitutionEnvironment(CPPDEFINES = 'FOO') - env2 = env.Override({'parse_flags' : '-std=c99 -X -DBAR'}) + env = SubstitutionEnvironment(CPPDEFINES='FOO') + env2 = env.Override({'parse_flags': '-std=c99 -X -DBAR'}) assert 'CFLAGS' not in env assert env2['CFLAGS'] == ['-std=c99'], env2['CFLAGS'] assert 'CCFLAGS' not in env assert env2['CCFLAGS'] == ['-X'], env2['CCFLAGS'] + # make sure they are independent + self.assertIsNot(env['CPPDEFINES'], env2['CPPDEFINES']) assert env['CPPDEFINES'] == 'FOO', env['CPPDEFINES'] - assert env2['CPPDEFINES'] == ['FOO','BAR'], env2['CPPDEFINES'] - + self.assertEqual(list(env2['CPPDEFINES']), ['FOO','BAR']) class NoSubstitutionProxyTestCase(unittest.TestCase,TestEnvironmentFixture): diff -Nru scons-4.4.0+dfsg/SCons/Environment.xml scons-4.5.2+dfsg/SCons/Environment.xml --- scons-4.4.0+dfsg/SCons/Environment.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Environment.xml 2023-03-21 16:17:04.000000000 +0000 @@ -499,126 +499,205 @@ -Intelligently append values to &consvars; in the &consenv; -named by env. +Appends value(s) intelligently to &consvars; in +env. The &consvars; and values to add to them are passed as key=val pairs (&Python; keyword arguments). &f-env-Append; is designed to allow adding values -without normally having to know the data type of an existing &consvar;. +without having to think about the data type of an existing &consvar;. Regular &Python; syntax can also be used to manipulate the &consvar;, -but for that you must know the type of the &consvar;: -for example, different &Python; syntax is needed to combine -a list of values with a single string value, or vice versa. +but for that you may need to know the types involved, +for example pure &Python; lets you directly "add" two lists of strings, +but adding a string to a list or a list to a string requires +different syntax - things &f-Append; takes care of. Some pre-defined &consvars; do have type expectations -based on how &SCons; will use them, -for example &cv-link-CPPDEFINES; is normally a string or a list of strings, -but can be a string, -a list of strings, -a list of tuples, -or a dictionary, while &cv-link-LIBEMITTER; -would expect a callable or list of callables, -and &cv-link-BUILDERS; would expect a mapping type. +based on how &SCons; will use them: +for example &cv-link-CPPDEFINES; is often a string or a list of strings, +but can also be a list of tuples or a dictionary; +while &cv-link-LIBEMITTER; +is expected to be a callable or list of callables, +and &cv-link-BUILDERS; is expected to be a dictionary. Consult the documentation for the various &consvars; for more details. 
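A brief hedged illustration of that type bridging for an ordinary (non-CPPDEFINES) variable:

    # hypothetical SConstruct fragment
    env = Environment(CCFLAGS="-O2")       # starts out as a plain string
    env.Append(CCFLAGS=["-g", "-Wall"])    # Append reconciles string + list
    # expected result: env['CCFLAGS'] holds all three flags as a list,
    # something plain Python concatenation would reject with a TypeError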
-The following descriptions apply to both the append -and prepend functions, the only difference being -the insertion point of the added values. - - -If env. does not have a &consvar; -indicated by key, -val -is added to the environment under that key as-is. +The following descriptions apply to both the &f-Append; +and &f-Prepend; methods, as well as their +Unique variants, +with the differences being the insertion point of the added values +and whether duplication is allowed. -val can be almost any type, -and &SCons; will combine it with an existing value into an appropriate type, -but there are a few special cases to be aware of. -When two strings are combined, -the result is normally a new string, -with the caller responsible for supplying any needed separation. -The exception to this is the &consvar; &cv-link-CPPDEFINES;, -in which each item will be postprocessed by adding a prefix -and/or suffix, -so the contents are treated as a list of strings, that is, -adding a string will result in a separate string entry, -not a combined string. For &cv-CPPDEFINES; as well as -for &cv-link-LIBS;, and the various *PATH; -variables, &SCons; will supply the compiler-specific -syntax (e.g. adding a -D or /D -prefix for &cv-CPPDEFINES;), so this syntax should be omitted when +val can be almost any type. +If env does not have a &consvar; +named key, +then key is simply +stored with a value of val. +Otherwise, val is +combinined with the existing value, +possibly converting into an appropriate type +which can hold the expanded contents. +There are a few special cases to be aware of. +Normally, when two strings are combined, +the result is a new string containing their concatenation +(and you are responsible for supplying any needed separation); +however, the contents of &cv-link-CPPDEFINES; will +will be postprocessed by adding a prefix and/or suffix +to each entry when the command line is produced, +so &SCons; keeps them separate - +appending a string will result in a separate string entry, +not a combined string. +For &cv-CPPDEFINES;. as well as +&cv-link-LIBS;, and the various *PATH variables, +&SCons; will amend the variable by supplying the compiler-specific +syntax (e.g. prepending a -D or /D +prefix for &cv-CPPDEFINES;), so you should omit this syntax when adding values to these variables. -Example (gcc syntax shown in the expansion of &CPPDEFINES;): +Examples (gcc syntax shown in the expansion of &CPPDEFINES;): env = Environment(CXXFLAGS="-std=c11", CPPDEFINES="RELEASE") -print("CXXFLAGS={}, CPPDEFINES={}".format(env['CXXFLAGS'], env['CPPDEFINES'])) -# notice including a leading space in CXXFLAGS value +print(f"CXXFLAGS = {env['CXXFLAGS']}, CPPDEFINES = {env['CPPDEFINES']}") +# notice including a leading space in CXXFLAGS addition env.Append(CXXFLAGS=" -O", CPPDEFINES="EXTRA") -print("CXXFLAGS={}, CPPDEFINES={}".format(env['CXXFLAGS'], env['CPPDEFINES'])) -print("CPPDEFINES will expand to {}".format(env.subst("$_CPPDEFFLAGS"))) +print(f"CXXFLAGS = {env['CXXFLAGS']}, CPPDEFINES = {env['CPPDEFINES']}") +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) $ scons -Q -CXXFLAGS=-std=c11, CPPDEFINES=RELEASE -CXXFLAGS=-std=c11 -O, CPPDEFINES=['RELEASE', 'EXTRA'] +CXXFLAGS = -std=c11, CPPDEFINES = RELEASE +CXXFLAGS = -std=c11 -O, CPPDEFINES = deque(['RELEASE', 'EXTRA']) CPPDEFINES will expand to -DRELEASE -DEXTRA scons: `.' is up to date. -Because &cv-link-CPPDEFINES; is intended to -describe C/C++ pre-processor macro definitions, -it accepts additional syntax. 
-Preprocessor macros can be valued, or un-valued, as in --DBAR=1 or --DFOO. -The macro can be be supplied as a complete string including the value, -or as a tuple (or list) of macro, value, or as a dictionary. -Example (again gcc syntax in the expanded defines): +Because &cv-link-CPPDEFINES; is intended for command-line +specification of C/C++ preprocessor macros, +additional syntax is accepted when adding to it. +The preprocessor accepts arguments to predefine a macro name by itself +(-DFOO for most compilers, +/DFOO for Microsoft C++), +which gives it an implicit value of 1, +or can be given with a replacement value +(-DBAR=TEXT). +&SCons; follows these rules when adding to &cv-CPPDEFINES;: + + + +A string is split on spaces, +giving an easy way to enter multiple macros in one addition. +Use an = to specify a valued macro. + + +A tuple is treated as a valued macro. +Use the value None if the macro should not have a value. +It is an error to supply more than two elements in such a tuple. + + +A list is processed in order, +adding each item without further interpretation. +In this case, space-separated strings are not split. + + +A dictionary is processed in order, +adding each key:value pair as a valued macro. +Use the value None if the macro should not have a value. + + + + + +Examples: env = Environment(CPPDEFINES="FOO") -print("CPPDEFINES={}".format(env['CPPDEFINES'])) +print("CPPDEFINES =", env['CPPDEFINES']) env.Append(CPPDEFINES="BAR=1") -print("CPPDEFINES={}".format(env['CPPDEFINES'])) -env.Append(CPPDEFINES=("OTHER", 2)) -print("CPPDEFINES={}".format(env['CPPDEFINES'])) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES=[("OTHER", 2)]) +print("CPPDEFINES =", env['CPPDEFINES']) env.Append(CPPDEFINES={"EXTRA": "arg"}) -print("CPPDEFINES={}".format(env['CPPDEFINES'])) -print("CPPDEFINES will expand to {}".format(env.subst("$_CPPDEFFLAGS"))) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) $ scons -Q -CPPDEFINES=FOO -CPPDEFINES=['FOO', 'BAR=1'] -CPPDEFINES=['FOO', 'BAR=1', ('OTHER', 2)] -CPPDEFINES=['FOO', 'BAR=1', ('OTHER', 2), {'EXTRA': 'arg'}] +CPPDEFINES = FOO +CPPDEFINES = deque(['FOO', 'BAR=1']) +CPPDEFINES = deque(['FOO', 'BAR=1', ('OTHER', 2)]) +CPPDEFINES = deque(['FOO', 'BAR=1', ('OTHER', 2), ('EXTRA', 'arg')]) CPPDEFINES will expand to -DFOO -DBAR=1 -DOTHER=2 -DEXTRA=arg scons: `.' is up to date. -Adding a string val -to a dictonary &consvar; will enter -val as the key in the dict, +Examples of adding multiple macros: + + + +env = Environment() +env.Append(CPPDEFINES=[("ONE", 1), "TWO", ("THREE", )]) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES={"FOUR": 4, "FIVE": None}) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) + + + +$ scons -Q +CPPDEFINES = [('ONE', 1), 'TWO', ('THREE',)] +CPPDEFINES = deque([('ONE', 1), 'TWO', ('THREE',), ('FOUR', 4), ('FIVE', None)]) +CPPDEFINES will expand to -DONE=1 -DTWO -DTHREE -DFOUR=4 -DFIVE +scons: `.' is up to date. + + + +Changed in version 4.5: +clarifined the use of tuples vs. other types, +handling is now consistent across the four functions. 
+ + + +env = Environment() +env.Append(CPPDEFINES=("MACRO1", "MACRO2")) +print("CPPDEFINES =", env['CPPDEFINES']) +env.Append(CPPDEFINES=[("MACRO3", "MACRO4")]) +print("CPPDEFINES =", env['CPPDEFINES']) +print("CPPDEFINES will expand to", env.subst('$_CPPDEFFLAGS')) + + + +$ scons -Q +CPPDEFINES = ('MACRO1', 'MACRO2') +CPPDEFINES = deque(['MACRO1', 'MACRO2', ('MACRO3', 'MACRO4')]) +CPPDEFINES will expand to -DMACRO1 -DMACRO2 -DMACRO3=MACRO4 +scons: `.' is up to date. + + + +See &cv-link-CPPDEFINES; for more details. + + + +Appending a string val +to a dictonary-typed &consvar; enters +val as the key in the dictionary, and None as its value. -Using a tuple type to supply a key + value only works -for the special case of &cv-link-CPPDEFINES; +Using a tuple type to supply a key, value +only works for the special case of &cv-CPPDEFINES; described above. Although most combinations of types work without needing to know the details, some combinations -do not make sense and a &Python; exception will be raised. +do not make sense and &Python; raises an exception. @@ -626,7 +705,7 @@ which are path specifications (conventionally, the names of such end in PATH), it is recommended to add the values as a list of strings, -even if there is only a single string to add. +even if you are only adding a single string. The same goes for adding library names to &cv-LIBS;. @@ -696,20 +775,20 @@ -(key=val, [...], delete_existing=False) +(key=val, [...], [delete_existing=False]) Append values to &consvars; in the current &consenv;, maintaining uniqueness. -Works like &f-link-env-Append; (see for details), -except that values already present in the &consvar; -will not be added again. +Works like &f-link-env-Append;, +except that values that would become duplicates +are not added. If delete_existing -is True, -the existing matching value is first removed, -and the requested value is added, -having the effect of moving such values to the end. +is set to a true value, then for any duplicate, +the existing instance of val is first removed, +then val is appended, +having the effect of moving it to the end. @@ -1901,48 +1980,81 @@ -(pattern, [ondisk, source, strings, exclude]) +(pattern, [ondisk=True, source=False, strings=False, exclude=None]) -Returns Nodes (or strings) that match the specified -pattern, -relative to the directory of the current -&SConscript; -file. +Returns a possibly empty list of Nodes (or strings) that match +pathname specification pattern. +pattern can be absolute, +top-relative, +or (most commonly) relative to the directory of the current +&SConscript; file. +&f-Glob; matches both files stored on disk and Nodes +which &SCons; already knows about, even if any corresponding +file is not currently stored on disk. The evironment method form (&f-env-Glob;) performs string substition on pattern -and returns whatever matches -the resulting expanded pattern. +and returns whatever matches the resulting expanded pattern. +The results are sorted, unlike for the similar &Python; +glob.glob function, +to ensure build order will be stable. -The specified pattern -uses Unix shell style metacharacters for matching: +can contain POSIX-style shell metacharacters for matching: - - * matches everything - ? matches any single character - [seq] matches any character in seq - [!seq] matches any char not in seq - + + + + + Pattern + Meaning + + + + + * + matches everything + + + ? + matches any single character + + + [seq] + matches any character in seq + (can be a list or a range). 
+ + + [!seq] + matches any character not in seq + + + + + + +For a literal match, wrap the metacharacter in brackets to +escape the normal behavior. +For example, '[?]' matches the character +'?'. + -If the first character of a filename is a dot, -it must be matched explicitly. -Character matches do -not -span directory separators. +Filenames starting with a dot are specially handled - +they can only be matched by patterns that start with a dot +(or have a dot immediately following a pathname separator +character, or slash), they are not matched by the metacharacters. +Metacharacter matches also do not span directory separators. -The &f-Glob; -knows about -repositories +understands repositories (see the &f-link-Repository; function) @@ -1950,8 +2062,7 @@ (see the &f-link-VariantDir; function) -and -returns a Node (or string, if so configured) +and returns a Node (or string, if so configured) match in the local (SConscript) directory if a matching Node is found anywhere in a corresponding @@ -1959,65 +2070,60 @@ -The +If the optional ondisk -argument may be set to a value which evaluates -False -to disable the search for matches on disk, -thereby only returning matches among -already-configured File or Dir Nodes. -The default behavior is to -return corresponding Nodes -for any on-disk matches found. +argument evaluates false, +the search for matches on disk is disabled, +and only matches from +already-configured File or Dir Nodes are returned. +The default is to return Nodes for +matches on disk as well. -The +If the optional source -argument may be set to a value which evaluates -True -to specify that, -when the local directory is a -&f-VariantDir;, -the returned Nodes should be from the -corresponding source directory, -not the local directory. +argument evaluates true, +and the local directory is a variant directory, +then &f-Glob; returns Nodes from +the corresponding source directory, +rather than the local directory. + -The +If the optional strings -argument may be set to a value which evaluates -True -to have the +argument evaluates true, &f-Glob; -function return strings, not Nodes, -that represent the matched files or directories. +returns matches as strings, rather than Nodes. The returned strings will be relative to the local (SConscript) directory. -(Note that This may make it easier to perform +(Note that while this may make it easier to perform arbitrary manipulation of file names, -but if the returned strings are +it loses the context &SCons; would have in the Node, +so if the returned strings are passed to a different &SConscript; file, -any Node translation will be relative -to the other +any Node translation there will be relative +to that &SConscript; directory, -not the original +not to the original &SConscript; directory.) -The +The optional exclude argument may be set to a pattern or a list of patterns +describing files or directories -(following the same Unix shell semantics) -which must be filtered out of returned elements. -Elements matching a least one pattern of -this list will be excluded. +to filter out of the match list. +Elements matching at least one specified pattern will be excluded. +These patterns use the same syntax as for +pattern.
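To tie the arguments described above together, a minimal sketch (the file and directory names are hypothetical):

# Build every C source except a generated parser, and also show the
# matches under src/ as plain strings relative to this SConscript directory.
sources = Glob("*.c", exclude=["parser.c"])
Program("app", sources)
print(Glob("src/*.c", strings=True))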
@@ -2027,9 +2133,10 @@ Program("foo", Glob("*.c")) Zip("/tmp/everything", Glob(".??*") + Glob("*")) -sources = Glob("*.cpp", exclude=["os_*_specific_*.cpp"]) + \ - Glob( "os_%s_specific_*.cpp" % currentOS) +sources = Glob("*.cpp", exclude=["os_*_specific_*.cpp"]) \ + + Glob("os_%s_specific_*.cpp" % currentOS) + @@ -2066,24 +2173,26 @@ -The specified dependency file(s) -will be ignored when deciding if -the target file(s) need to be rebuilt. - - - -You can also use -&f-Ignore; -to remove a target from the default build. -In order to do this you must specify the directory the target will -be built in as the target, and the file you want to skip building -as the dependency. +Ignores dependency +when deciding if +target needs to be rebuilt. +target and +dependency +can each be a single filename or Node +or a list of filenames or Nodes. -Note that this will only remove the dependencies listed from -the files built by default. It will still be built if that -dependency is needed by another object being built. +&f-Ignore; can also be used to +remove a target from the default build +by specifying the directory the target will be built in as +target +and the file you want to skip selecting for building as +dependency. +Note that this only removes the target from +the default target selection algorithm: +if it is a dependency of another object being +built &SCons; still builds it normally. See the third and forth examples below. @@ -2493,15 +2602,15 @@ command typical of the POSIX programming environment (for example, pkg-config). -Note that such a comamnd is executed using the +Note that such a command is executed using the SCons execution environment; if the command needs additional information, -that information needs to be explcitly provided. +that information needs to be explicitly provided. See &f-link-ParseConfig; for more details. -Flag values are translated accordig to the prefix found, +Flag values are translated according to the prefix found, and added to the following construction variables: @@ -2512,6 +2621,7 @@ -frameworkdir= FRAMEWORKPATH -fmerge-all-constants CCFLAGS, LINKFLAGS -fopenmp CCFLAGS, LINKFLAGS +-fsanitize CCFLAGS, LINKFLAGS -include CCFLAGS -imacros CCFLAGS -isysroot CCFLAGS, LINKFLAGS @@ -2685,22 +2795,22 @@ -(key=val, delete_existing=False, [...]) +(key=val, [...], [delete_existing=False]) Prepend values to &consvars; in the current &consenv;, maintaining uniqueness. -Works like &f-link-env-Append; (see for details), +Works like &f-link-env-Append;, except that values are added to the front, -rather than the end, of any existing value of the &consvar;, -and values already present in the &consvar; -will not be added again. +rather than the end, of the &consvar;, +and values that would become duplicates +are not added. If delete_existing -is True, -the existing matching value is first removed, -and the requested value is inserted, -having the effect of moving such values to the front. +is set to a true value, then for any duplicate, +the existing instance of val is first removed, +then val is inserted, +having the effect of moving it to the front. @@ -2861,7 +2971,7 @@ - + (value) @@ -2870,18 +2980,11 @@ &scons; changes its working directory to the directory in which each -subsidiary SConscript file lives. +subsidiary SConscript file lives +while reading and processing that script. 
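For instance, with the default behavior just described (the directory names are hypothetical):

SConscript('src/SConscript')   # read with the working directory set to src/
SConscript('doc/SConscript')   # read with the working directory set to doc/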
This behavior may be disabled -by specifying either: - - - -SConscriptChdir(0) -env.SConscriptChdir(0) - - - -in which case +by specifying an argument which +evaluates false, in which case &scons; will stay in the top-level directory while reading all SConscript files. @@ -2899,10 +3002,9 @@ -env = Environment() -SConscriptChdir(0) +SConscriptChdir(False) SConscript('foo/SConscript') # will not chdir to foo -env.SConscriptChdir(1) +SConscriptChdir(True) SConscript('bar/SConscript') # will chdir to bar @@ -3288,6 +3390,12 @@ +Changed in version 4.2: +&f-env-Tool; now returns the tool object, +previously it did not return (i.e. returned None). + + + Examples: @@ -3324,12 +3432,6 @@ gltool = Tool('opengl', toolpath = ['tools']) gltool(env) # adds 'opengl' to the TOOLS variable - - -Changed in &SCons; 4.2: &f-env-Tool; now returns -the tool object, previously it did not return -(i.e. returned None). - @@ -3350,10 +3452,6 @@ files are up-to-date.) When using timestamp source signatures, Value Nodes' timestamps are equal to the system time when the Node is created. -name can be provided as an alternative name -for the resulting Value node; this is advised -if the value parameter can't be converted to -a string. @@ -3373,6 +3471,18 @@ +The optional name parameter can be provided as an +alternative name for the resulting Value node; +this is advised if the value parameter +cannot be converted to a string. + + + +Changed in version 4.0: +the name parameter was added. + + + Examples: @@ -3392,8 +3502,8 @@ # Attach a .Config() builder for the above function action # to the construction environment. -env['BUILDERS']['Config'] = Builder(action = create) -env.Config(target = 'package-config', source = Value(prefix)) +env['BUILDERS']['Config'] = Builder(action=create) +env.Config(target='package-config', source=Value(prefix)) def build_value(target, source, env): # A function that "builds" a Python Value by updating @@ -3406,8 +3516,8 @@ # Attach a .UpdateValue() builder for the above function # action to the construction environment. -env['BUILDERS']['UpdateValue'] = Builder(action = build_value) -env.UpdateValue(target = Value(output), source = Value(input)) +env['BUILDERS']['UpdateValue'] = Builder(action=build_value) +env.UpdateValue(target=Value(output), source=Value(input)) @@ -3484,7 +3594,7 @@ variant_dir, regardless of the value of duplicate. -When calling an SConscript file, you can use the +When calling an SConscript file, you can use the exports keyword argument to pass parameters (individually or as an appropriately set up environment) so the SConscript can pick up the right settings for that variant build. diff -Nru scons-4.4.0+dfsg/SCons/Errors.py scons-4.5.2+dfsg/SCons/Errors.py --- scons-4.4.0+dfsg/SCons/Errors.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Errors.py 2023-03-21 16:17:04.000000000 +0000 @@ -24,60 +24,49 @@ """SCons exception classes. Used to handle internal and user errors in SCons. - """ import shutil import SCons.Util +# Note that not all Errors are defined here, some are at the point of use + class BuildError(Exception): """SCons Errors that can occur while building. - Attributes: - Information about the cause of the build error : + A :class:`BuildError` exception contains information both + about the erorr itself, and what caused the error. - errstr: a description of the error message - - status: the return code of the action that caused the build error. 
- Must be set to a non-zero value even if the build error is not due - to an action returning a non-zero returned code. - - exitstatus: SCons exit status due to this build error. - Must be nonzero unless due to an explicit Exit() - call. Not always the same as status, since - actions return a status code that should be - respected, but SCons typically exits with 2 - irrespective of the return value of the failed - action. - - filename: The name of the file or directory that caused the - build error. Set to None if no files are associated with - this error. This might be different from the target - being built. For example, failure to create the - directory in which the target file will appear. It - can be None if the error is not due to a particular - filename. - - exc_info: Info about exception that caused the build - error. Set to (None, None, None) if this build - error is not due to an exception. - - Information about the what caused the build error : - - node: the error occurred while building this target node(s) - - executor: the executor that caused the build to fail (might - be None if the build failures is not due to the - executor failing) - - action: the action that caused the build to fail (might be - None if the build failures is not due to the an - action failure) - - command: the command line for the action that caused the - build to fail (might be None if the build failures - is not due to the an action failure) + Attributes: + node: (*cause*) the error occurred while building this target node(s) + errstr: (*info*) a description of the error message + status: (*info*) the return code of the action that caused the build error. + Must be set to a non-zero value even if the build error is not due + to an action returning a non-zero returned code. + exitstatus: (*info*) SCons exit status due to this build error. + Must be nonzero unless due to an explicit :meth:`Exit` call. + Not always the same as ``status``, since actions return a status + code that should be respected, but SCons typically exits with 2 + irrespective of the return value of the failed action. + filename: (*info*) The name of the file or directory that caused the + build error. Set to ``None`` if no files are associated with + this error. This might be different from the target + being built. For example, failure to create the + directory in which the target file will appear. It + can be ``None`` if the error is not due to a particular + filename. + executor: (*cause*) the executor that caused the build to fail (might + be ``None`` if the build failure is not due to the executor failing) + action: (*cause*) the action that caused the build to fail (might be + ``None`` if the build failure is not due to an + action failure) + command: (*cause*) the command line for the action that caused the + build to fail (might be ``None`` if the build failure + is not due to an action failure) + exc_info: (*info*) Info about exception that caused the build + error. Set to ``(None, None, None)`` if this build + error is not due to an exception. """ diff -Nru scons-4.4.0+dfsg/SCons/Executor.py scons-4.5.2+dfsg/SCons/Executor.py --- scons-4.4.0+dfsg/SCons/Executor.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Executor.py 2023-03-21 16:17:04.000000000 +0000 @@ -306,30 +306,30 @@ over and over), so removing the duplicates once up front should save the Taskmaster a lot of work.
""" - result = SCons.Util.UniqueList([]) + result = [] for target in self.get_all_targets(): result.extend(target.children()) - return result + return SCons.Util.uniquer_hashables(result) def get_all_prerequisites(self): """Returns all unique (order-only) prerequisites for all batches of this Executor. """ - result = SCons.Util.UniqueList([]) + result = [] for target in self.get_all_targets(): if target.prerequisites is not None: result.extend(target.prerequisites) - return result + return SCons.Util.uniquer_hashables(result) def get_action_side_effects(self): """Returns all side effects for all batches of this Executor used by the underlying Action. """ - result = SCons.Util.UniqueList([]) + result = [] for target in self.get_action_targets(): result.extend(target.side_effects) - return result + return SCons.Util.uniquer_hashables(result) @SCons.Memoize.CountMethodCall def get_build_env(self): diff -Nru scons-4.4.0+dfsg/SCons/__init__.py scons-4.5.2+dfsg/SCons/__init__.py --- scons-4.4.0+dfsg/SCons/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,9 +1,9 @@ -__version__="4.4.0" -__copyright__="Copyright (c) 2001 - 2022 The SCons Foundation" +__version__="4.5.2" +__copyright__="Copyright (c) 2001 - 2023 The SCons Foundation" __developer__="bdbaddog" -__date__="Sat, 30 Jul 2022 14:11:34 -0700" -__buildsys__="M1Dog2021" -__revision__="fc8d0ec215ee6cba8bc158ad40c099be0b598297" -__build__="fc8d0ec215ee6cba8bc158ad40c099be0b598297" +__date__="Tue, 21 Mar 2023 12:11:27 -0400" +__buildsys__="M1DOG2021" +__revision__="120fd4f633e9ef3cafbc0fec35306d7555ffd1db" +__build__="120fd4f633e9ef3cafbc0fec35306d7555ffd1db" # make sure compatibility is always in place import SCons.compat # noqa \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Job.py scons-4.5.2+dfsg/SCons/Job.py --- scons-4.4.0+dfsg/SCons/Job.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Job.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,439 +0,0 @@ -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -"""Serial and Parallel classes to execute build tasks. - -The Jobs class provides a higher level interface to start, -stop, and wait on jobs. -""" - -import SCons.compat - -import os -import signal - -import SCons.Errors -import SCons.Warnings - -# The default stack size (in kilobytes) of the threads used to execute -# jobs in parallel. -# -# We use a stack size of 256 kilobytes. 
The default on some platforms -# is too large and prevents us from creating enough threads to fully -# parallelized the build. For example, the default stack size on linux -# is 8 MBytes. - -explicit_stack_size = None -default_stack_size = 256 - -interrupt_msg = 'Build interrupted.' - - -class InterruptState: - def __init__(self): - self.interrupted = False - - def set(self): - self.interrupted = True - - def __call__(self): - return self.interrupted - - -class Jobs: - """An instance of this class initializes N jobs, and provides - methods for starting, stopping, and waiting on all N jobs. - """ - - def __init__(self, num, taskmaster): - """ - Create 'num' jobs using the given taskmaster. - - If 'num' is 1 or less, then a serial job will be used, - otherwise a parallel job with 'num' worker threads will - be used. - - The 'num_jobs' attribute will be set to the actual number of jobs - allocated. If more than one job is requested but the Parallel - class can't do it, it gets reset to 1. Wrapping interfaces that - care should check the value of 'num_jobs' after initialization. - """ - - self.job = None - if num > 1: - stack_size = explicit_stack_size - if stack_size is None: - stack_size = default_stack_size - - try: - self.job = Parallel(taskmaster, num, stack_size) - self.num_jobs = num - except NameError: - pass - if self.job is None: - self.job = Serial(taskmaster) - self.num_jobs = 1 - - def run(self, postfunc=lambda: None): - """Run the jobs. - - postfunc() will be invoked after the jobs has run. It will be - invoked even if the jobs are interrupted by a keyboard - interrupt (well, in fact by a signal such as either SIGINT, - SIGTERM or SIGHUP). The execution of postfunc() is protected - against keyboard interrupts and is guaranteed to run to - completion.""" - self._setup_sig_handler() - try: - self.job.start() - finally: - postfunc() - self._reset_sig_handler() - - def were_interrupted(self): - """Returns whether the jobs were interrupted by a signal.""" - return self.job.interrupted() - - def _setup_sig_handler(self): - """Setup an interrupt handler so that SCons can shutdown cleanly in - various conditions: - - a) SIGINT: Keyboard interrupt - b) SIGTERM: kill or system shutdown - c) SIGHUP: Controlling shell exiting - - We handle all of these cases by stopping the taskmaster. It - turns out that it's very difficult to stop the build process - by throwing asynchronously an exception such as - KeyboardInterrupt. For example, the python Condition - variables (threading.Condition) and queues do not seem to be - asynchronous-exception-safe. It would require adding a whole - bunch of try/finally block and except KeyboardInterrupt all - over the place. - - Note also that we have to be careful to handle the case when - SCons forks before executing another process. In that case, we - want the child to exit immediately. - """ - def handler(signum, stack, self=self, parentpid=os.getpid()): - if os.getpid() == parentpid: - self.job.taskmaster.stop() - self.job.interrupted.set() - else: - os._exit(2) # pylint: disable=protected-access - - self.old_sigint = signal.signal(signal.SIGINT, handler) - self.old_sigterm = signal.signal(signal.SIGTERM, handler) - try: - self.old_sighup = signal.signal(signal.SIGHUP, handler) - except AttributeError: - pass - if (self.old_sigint is None) or (self.old_sigterm is None) or \ - (hasattr(self, "old_sighup") and self.old_sighup is None): - msg = "Overwritting previous signal handler which was not installed from Python. 
" + \ - "Will not be able to reinstate and so will return to default handler." - SCons.Warnings.warn(SCons.Warnings.SConsWarning, msg) - - def _reset_sig_handler(self): - """Restore the signal handlers to their previous state (before the - call to _setup_sig_handler().""" - sigint_to_use = self.old_sigint if self.old_sigint is not None else signal.SIG_DFL - sigterm_to_use = self.old_sigterm if self.old_sigterm is not None else signal.SIG_DFL - signal.signal(signal.SIGINT, sigint_to_use) - signal.signal(signal.SIGTERM, sigterm_to_use) - try: - sigterm_to_use = self.old_sighup if self.old_sighup is not None else signal.SIG_DFL - signal.signal(signal.SIGHUP, sigterm_to_use) - except AttributeError: - pass - -class Serial: - """This class is used to execute tasks in series, and is more efficient - than Parallel, but is only appropriate for non-parallel builds. Only - one instance of this class should be in existence at a time. - - This class is not thread safe. - """ - - def __init__(self, taskmaster): - """Create a new serial job given a taskmaster. - - The taskmaster's next_task() method should return the next task - that needs to be executed, or None if there are no more tasks. The - taskmaster's executed() method will be called for each task when it - is successfully executed, or failed() will be called if it failed to - execute (e.g. execute() raised an exception).""" - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - - def start(self): - """Start the job. This will begin pulling tasks from the taskmaster - and executing them, and return when there are no more tasks. If a task - fails to execute (i.e. execute() raises an exception), then the job will - stop.""" - - while True: - task = self.taskmaster.next_task() - - if task is None: - break - - try: - task.prepare() - if task.needs_execute(): - task.execute() - except Exception: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except: - task.exception_set() - else: - task.exception_set() - - # Let the failed() callback function arrange for the - # build to stop if that's appropriate. - task.failed() - else: - task.executed() - - task.postprocess() - self.taskmaster.cleanup() - - -# Trap import failure so that everything in the Job module but the -# Parallel class (and its dependent classes) will work if the interpreter -# doesn't support threads. -try: - import queue - import threading -except ImportError: - pass -else: - class Worker(threading.Thread): - """A worker thread waits on a task to be posted to its request queue, - dequeues the task, executes it, and posts a tuple including the task - and a boolean indicating whether the task executed successfully. """ - - def __init__(self, requestQueue, resultsQueue, interrupted): - super().__init__() - self.daemon = True - self.requestQueue = requestQueue - self.resultsQueue = resultsQueue - self.interrupted = interrupted - self.start() - - def run(self): - while True: - task = self.requestQueue.get() - - if task is None: - # The "None" value is used as a sentinel by - # ThreadPool.cleanup(). This indicates that there - # are no more tasks, so we should quit. 
- break - - try: - if self.interrupted(): - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - task.execute() - except: - task.exception_set() - ok = False - else: - ok = True - - self.resultsQueue.put((task, ok)) - - class ThreadPool: - """This class is responsible for spawning and managing worker threads.""" - - def __init__(self, num, stack_size, interrupted): - """Create the request and reply queues, and 'num' worker threads. - - One must specify the stack size of the worker threads. The - stack size is specified in kilobytes. - """ - self.requestQueue = queue.Queue(0) - self.resultsQueue = queue.Queue(0) - - try: - prev_size = threading.stack_size(stack_size*1024) - except AttributeError as e: - # Only print a warning if the stack size has been - # explicitly set. - if explicit_stack_size is not None: - msg = "Setting stack size is unsupported by this version of Python:\n " + \ - e.args[0] - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - except ValueError as e: - msg = "Setting stack size failed:\n " + str(e) - SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) - - # Create worker threads - self.workers = [] - for _ in range(num): - worker = Worker(self.requestQueue, self.resultsQueue, interrupted) - self.workers.append(worker) - - if 'prev_size' in locals(): - threading.stack_size(prev_size) - - def put(self, task): - """Put task into request queue.""" - self.requestQueue.put(task) - - def get(self): - """Remove and return a result tuple from the results queue.""" - return self.resultsQueue.get() - - def preparation_failed(self, task): - self.resultsQueue.put((task, False)) - - def cleanup(self): - """ - Shuts down the thread pool, giving each worker thread a - chance to shut down gracefully. - """ - # For each worker thread, put a sentinel "None" value - # on the requestQueue (indicating that there's no work - # to be done) so that each worker thread will get one and - # terminate gracefully. - for _ in self.workers: - self.requestQueue.put(None) - - # Wait for all of the workers to terminate. - # - # If we don't do this, later Python versions (2.4, 2.5) often - # seem to raise exceptions during shutdown. This happens - # in requestQueue.get(), as an assertion failure that - # requestQueue.not_full is notified while not acquired, - # seemingly because the main thread has shut down (or is - # in the process of doing so) while the workers are still - # trying to pull sentinels off the requestQueue. - # - # Normally these terminations should happen fairly quickly, - # but we'll stick a one-second timeout on here just in case - # someone gets hung. - for worker in self.workers: - worker.join(1.0) - self.workers = [] - - class Parallel: - """This class is used to execute tasks in parallel, and is somewhat - less efficient than Serial, but is appropriate for parallel builds. - - This class is thread safe. - """ - - def __init__(self, taskmaster, num, stack_size): - """Create a new parallel job given a taskmaster. - - The taskmaster's next_task() method should return the next - task that needs to be executed, or None if there are no more - tasks. The taskmaster's executed() method will be called - for each task when it is successfully executed, or failed() - will be called if the task failed to execute (i.e. execute() - raised an exception). 
- - Note: calls to taskmaster are serialized, but calls to - execute() on distinct tasks are not serialized, because - that is the whole point of parallel jobs: they can execute - multiple tasks simultaneously. """ - - self.taskmaster = taskmaster - self.interrupted = InterruptState() - self.tp = ThreadPool(num, stack_size, self.interrupted) - - self.maxjobs = num - - def start(self): - """Start the job. This will begin pulling tasks from the - taskmaster and executing them, and return when there are no - more tasks. If a task fails to execute (i.e. execute() raises - an exception), then the job will stop.""" - - jobs = 0 - - while True: - # Start up as many available tasks as we're - # allowed to. - while jobs < self.maxjobs: - task = self.taskmaster.next_task() - if task is None: - break - - try: - # prepare task for execution - task.prepare() - except: - task.exception_set() - task.failed() - task.postprocess() - else: - if task.needs_execute(): - # dispatch task - self.tp.put(task) - jobs += 1 - else: - task.executed() - task.postprocess() - - if not task and not jobs: break - - # Let any/all completed tasks finish up before we go - # back and put the next batch of tasks on the queue. - while True: - task, ok = self.tp.get() - jobs -= 1 - - if ok: - task.executed() - else: - if self.interrupted(): - try: - raise SCons.Errors.BuildError( - task.targets[0], errstr=interrupt_msg) - except: - task.exception_set() - - # Let the failed() callback function arrange - # for the build to stop if that's appropriate. - task.failed() - - task.postprocess() - - if self.tp.resultsQueue.empty(): - break - - self.tp.cleanup() - self.taskmaster.cleanup() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/JobTests.py scons-4.5.2+dfsg/SCons/JobTests.py --- scons-4.4.0+dfsg/SCons/JobTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/JobTests.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,575 +0,0 @@ -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -import unittest -import random -import math -import sys -import time -import os - -import TestUnit - -import SCons.Job - - -def get_cpu_nums(): - # Linux, Unix and MacOS: - if hasattr( os, "sysconf" ): - if "SC_NPROCESSORS_ONLN" in os.sysconf_names: - # Linux & Unix: - ncpus = os.sysconf( "SC_NPROCESSORS_ONLN" ) - if isinstance(ncpus, int) and ncpus > 0: - return ncpus - else: # OSX: - return int(os.popen2("sysctl -n hw.ncpu")[1].read() ) - # Windows: - if "NUMBER_OF_PROCESSORS" in os.environ: - ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]) - if ncpus > 0: - return ncpus - return 1 # Default - -# a large number -num_sines = 500 - -# how many parallel jobs to perform for the test -num_jobs = get_cpu_nums()*2 - -# in case we werent able to detect num cpus for this test -# just make a hardcoded suffcient large number, though not future proof -if num_jobs == 2: - num_jobs = 33 - -# how many tasks to perform for the test -num_tasks = num_jobs*5 - -class DummyLock: - """fake lock class to use if threads are not supported""" - def acquire(self): - pass - - def release(self): - pass - -class NoThreadsException(Exception): - """raised by the ParallelTestCase if threads are not supported""" - - def __str__(self): - return "the interpreter doesn't support threads" - -class Task: - """A dummy task class for testing purposes.""" - - def __init__(self, i, taskmaster): - self.i = i - self.taskmaster = taskmaster - self.was_executed = 0 - self.was_prepared = 0 - - def prepare(self): - self.was_prepared = 1 - - def _do_something(self): - pass - - def needs_execute(self): - return True - - def execute(self): - self.taskmaster.test_case.assertTrue(self.was_prepared, - "the task wasn't prepared") - - self.taskmaster.guard.acquire() - self.taskmaster.begin_list.append(self.i) - self.taskmaster.guard.release() - - # while task is executing, represent this in the parallel_list - # and then turn it off - self.taskmaster.parallel_list[self.i] = 1 - self._do_something() - self.taskmaster.parallel_list[self.i] = 0 - - # check if task was executing while another was also executing - for j in range(1, self.taskmaster.num_tasks): - if self.taskmaster.parallel_list[j + 1] == 1: - self.taskmaster.found_parallel = True - break - - self.was_executed = 1 - - self.taskmaster.guard.acquire() - self.taskmaster.end_list.append(self.i) - self.taskmaster.guard.release() - - def executed(self): - self.taskmaster.num_executed = self.taskmaster.num_executed + 1 - - self.taskmaster.test_case.assertTrue(self.was_prepared, - "the task wasn't prepared") - self.taskmaster.test_case.assertTrue(self.was_executed, - "the task wasn't really executed") - self.taskmaster.test_case.assertTrue(isinstance(self, Task), - "the task wasn't really a Task instance") - - def failed(self): - self.taskmaster.num_failed = self.taskmaster.num_failed + 1 - self.taskmaster.stop = 1 - self.taskmaster.test_case.assertTrue(self.was_prepared, - "the task wasn't prepared") - - def postprocess(self): - self.taskmaster.num_postprocessed = self.taskmaster.num_postprocessed + 1 - - def exception_set(self): - pass - -class RandomTask(Task): - def _do_something(self): - # do something that will take some random amount of time: - for i in range(random.randrange(0, 100 + num_sines, 1)): - x = math.sin(i) - time.sleep(0.01) - -class ExceptionTask: - """A dummy task class for testing purposes.""" - - def __init__(self, i, taskmaster): - self.taskmaster = taskmaster - self.was_prepared = 0 - - def prepare(self): - self.was_prepared = 1 - - def needs_execute(self): - 
return True - - def execute(self): - raise Exception - - def executed(self): - self.taskmaster.num_executed = self.taskmaster.num_executed + 1 - - self.taskmaster.test_case.assertTrue(self.was_prepared, - "the task wasn't prepared") - self.taskmaster.test_case.assertTrue(self.was_executed, - "the task wasn't really executed") - self.taskmaster.test_case.assertTrue(self.__class__ is Task, - "the task wasn't really a Task instance") - - def failed(self): - self.taskmaster.num_failed = self.taskmaster.num_failed + 1 - self.taskmaster.stop = 1 - self.taskmaster.test_case.assertTrue(self.was_prepared, - "the task wasn't prepared") - - def postprocess(self): - self.taskmaster.num_postprocessed = self.taskmaster.num_postprocessed + 1 - - def exception_set(self): - self.taskmaster.exception_set() - -class Taskmaster: - """A dummy taskmaster class for testing the job classes.""" - - def __init__(self, n, test_case, Task): - """n is the number of dummy tasks to perform.""" - - self.test_case = test_case - self.stop = None - self.num_tasks = n - self.num_iterated = 0 - self.num_executed = 0 - self.num_failed = 0 - self.num_postprocessed = 0 - self.parallel_list = [0] * (n+1) - self.found_parallel = False - self.Task = Task - - # 'guard' guards 'task_begin_list' and 'task_end_list' - try: - import threading - self.guard = threading.Lock() - except ImportError: - self.guard = DummyLock() - - # keep track of the order tasks are begun in - self.begin_list = [] - - # keep track of the order tasks are completed in - self.end_list = [] - - def next_task(self): - if self.stop or self.all_tasks_are_iterated(): - return None - else: - self.num_iterated = self.num_iterated + 1 - return self.Task(self.num_iterated, self) - - def all_tasks_are_executed(self): - return self.num_executed == self.num_tasks - - def all_tasks_are_iterated(self): - return self.num_iterated == self.num_tasks - - def all_tasks_are_postprocessed(self): - return self.num_postprocessed == self.num_tasks - - def tasks_were_serial(self): - """analyze the task order to see if they were serial""" - return not self.found_parallel - - def exception_set(self): - pass - - def cleanup(self): - pass - -SaveThreadPool = None -ThreadPoolCallList = [] - -class ParallelTestCase(unittest.TestCase): - def runTest(self): - """test parallel jobs""" - - try: - import threading - except ImportError: - raise NoThreadsException() - - taskmaster = Taskmaster(num_tasks, self, RandomTask) - jobs = SCons.Job.Jobs(num_jobs, taskmaster) - jobs.run() - - self.assertTrue(not taskmaster.tasks_were_serial(), - "the tasks were not executed in parallel") - self.assertTrue(taskmaster.all_tasks_are_executed(), - "all the tests were not executed") - self.assertTrue(taskmaster.all_tasks_are_iterated(), - "all the tests were not iterated over") - self.assertTrue(taskmaster.all_tasks_are_postprocessed(), - "all the tests were not postprocessed") - self.assertFalse(taskmaster.num_failed, - "some task(s) failed to execute") - - # Verify that parallel jobs will pull all of the completed tasks - # out of the queue at once, instead of one by one. We do this by - # replacing the default ThreadPool class with one that records the - # order in which tasks are put() and get() to/from the pool, and - # which sleeps a little bit before call get() to let the initial - # tasks complete and get their notifications on the resultsQueue. 
- - class SleepTask(Task): - def _do_something(self): - time.sleep(0.01) - - global SaveThreadPool - SaveThreadPool = SCons.Job.ThreadPool - - class WaitThreadPool(SaveThreadPool): - def put(self, task): - ThreadPoolCallList.append('put(%s)' % task.i) - return SaveThreadPool.put(self, task) - def get(self): - time.sleep(0.05) - result = SaveThreadPool.get(self) - ThreadPoolCallList.append('get(%s)' % result[0].i) - return result - - SCons.Job.ThreadPool = WaitThreadPool - - try: - taskmaster = Taskmaster(3, self, SleepTask) - jobs = SCons.Job.Jobs(2, taskmaster) - jobs.run() - - # The key here is that we get(1) and get(2) from the - # resultsQueue before we put(3), but get(1) and get(2) can - # be in either order depending on how the first two parallel - # tasks get scheduled by the operating system. - expect = [ - ['put(1)', 'put(2)', 'get(1)', 'get(2)', 'put(3)', 'get(3)'], - ['put(1)', 'put(2)', 'get(2)', 'get(1)', 'put(3)', 'get(3)'], - ] - assert ThreadPoolCallList in expect, ThreadPoolCallList - - finally: - SCons.Job.ThreadPool = SaveThreadPool - -class SerialTestCase(unittest.TestCase): - def runTest(self): - """test a serial job""" - - taskmaster = Taskmaster(num_tasks, self, RandomTask) - jobs = SCons.Job.Jobs(1, taskmaster) - jobs.run() - - self.assertTrue(taskmaster.tasks_were_serial(), - "the tasks were not executed in series") - self.assertTrue(taskmaster.all_tasks_are_executed(), - "all the tests were not executed") - self.assertTrue(taskmaster.all_tasks_are_iterated(), - "all the tests were not iterated over") - self.assertTrue(taskmaster.all_tasks_are_postprocessed(), - "all the tests were not postprocessed") - self.assertFalse(taskmaster.num_failed, - "some task(s) failed to execute") - -class NoParallelTestCase(unittest.TestCase): - def runTest(self): - """test handling lack of parallel support""" - def NoParallel(tm, num, stack_size): - raise NameError - save_Parallel = SCons.Job.Parallel - SCons.Job.Parallel = NoParallel - try: - taskmaster = Taskmaster(num_tasks, self, RandomTask) - jobs = SCons.Job.Jobs(2, taskmaster) - self.assertTrue(jobs.num_jobs == 1, - "unexpected number of jobs %d" % jobs.num_jobs) - jobs.run() - self.assertTrue(taskmaster.tasks_were_serial(), - "the tasks were not executed in series") - self.assertTrue(taskmaster.all_tasks_are_executed(), - "all the tests were not executed") - self.assertTrue(taskmaster.all_tasks_are_iterated(), - "all the tests were not iterated over") - self.assertTrue(taskmaster.all_tasks_are_postprocessed(), - "all the tests were not postprocessed") - self.assertFalse(taskmaster.num_failed, - "some task(s) failed to execute") - finally: - SCons.Job.Parallel = save_Parallel - - -class SerialExceptionTestCase(unittest.TestCase): - def runTest(self): - """test a serial job with tasks that raise exceptions""" - - taskmaster = Taskmaster(num_tasks, self, ExceptionTask) - jobs = SCons.Job.Jobs(1, taskmaster) - jobs.run() - - self.assertFalse(taskmaster.num_executed, - "a task was executed") - self.assertTrue(taskmaster.num_iterated == 1, - "exactly one task should have been iterated") - self.assertTrue(taskmaster.num_failed == 1, - "exactly one task should have failed") - self.assertTrue(taskmaster.num_postprocessed == 1, - "exactly one task should have been postprocessed") - -class ParallelExceptionTestCase(unittest.TestCase): - def runTest(self): - """test parallel jobs with tasks that raise exceptions""" - - taskmaster = Taskmaster(num_tasks, self, ExceptionTask) - jobs = SCons.Job.Jobs(num_jobs, taskmaster) - jobs.run() - - 
self.assertFalse(taskmaster.num_executed, - "a task was executed") - self.assertTrue(taskmaster.num_iterated >= 1, - "one or more task should have been iterated") - self.assertTrue(taskmaster.num_failed >= 1, - "one or more tasks should have failed") - self.assertTrue(taskmaster.num_postprocessed >= 1, - "one or more tasks should have been postprocessed") - -#--------------------------------------------------------------------- -# Above tested Job object with contrived Task and Taskmaster objects. -# Now test Job object with actual Task and Taskmaster objects. - -import SCons.Taskmaster -import SCons.Node -import time - -class DummyNodeInfo: - def update(self, obj): - pass - -class testnode (SCons.Node.Node): - def __init__(self): - super().__init__() - self.expect_to_be = SCons.Node.executed - self.ninfo = DummyNodeInfo() - -class goodnode (testnode): - def __init__(self): - super().__init__() - self.expect_to_be = SCons.Node.up_to_date - self.ninfo = DummyNodeInfo() - -class slowgoodnode (goodnode): - def prepare(self): - # Delay to allow scheduled Jobs to run while the dispatcher - # sleeps. Keep this short because it affects the time taken - # by this test. - time.sleep(0.15) - goodnode.prepare(self) - -class badnode (goodnode): - def __init__(self): - super().__init__() - self.expect_to_be = SCons.Node.failed - def build(self, **kw): - raise Exception('badnode exception') - -class slowbadnode (badnode): - def build(self, **kw): - # Appears to take a while to build, allowing faster builds to - # overlap. Time duration is not especially important, but if - # it is faster than slowgoodnode then these could complete - # while the scheduler is sleeping. - time.sleep(0.05) - raise Exception('slowbadnode exception') - -class badpreparenode (badnode): - def prepare(self): - raise Exception('badpreparenode exception') - -class _SConsTaskTest(unittest.TestCase): - - def _test_seq(self, num_jobs): - for node_seq in [ - [goodnode], - [badnode], - [slowbadnode], - [slowgoodnode], - [badpreparenode], - [goodnode, badnode], - [slowgoodnode, badnode], - [goodnode, slowbadnode], - [goodnode, goodnode, goodnode, slowbadnode], - [goodnode, slowbadnode, badpreparenode, slowgoodnode], - [goodnode, slowbadnode, slowgoodnode, badnode] - ]: - - self._do_test(num_jobs, node_seq) - - def _do_test(self, num_jobs, node_seq): - - testnodes = [] - for tnum in range(num_tasks): - testnodes.append(node_seq[tnum % len(node_seq)]()) - - taskmaster = SCons.Taskmaster.Taskmaster(testnodes, - tasker=SCons.Taskmaster.AlwaysTask) - - jobs = SCons.Job.Jobs(num_jobs, taskmaster) - - # Exceptions thrown by tasks are not actually propagated to - # this level, but are instead stored in the Taskmaster. - - jobs.run() - - # Now figure out if tests proceeded correctly. The first test - # that fails will shutdown the initiation of subsequent tests, - # but any tests currently queued for execution will still be - # processed, and any tests that completed before the failure - # would have resulted in new tests being queued for execution. - - # Apply the following operational heuristics of Job.py: - # 0) An initial jobset of tasks will be queued before any - # good/bad results are obtained (from "execute" of task in - # thread). - # 1) A goodnode will complete immediately on its thread and - # allow another node to be queued for execution. - # 2) A badnode will complete immediately and suppress any - # subsequent execution queuing, but all currently queued - # tasks will still be processed. - # 3) A slowbadnode will fail later. 
It will block slots in - # the job queue. Nodes that complete immediately will - # allow other nodes to be queued in their place, and this - # will continue until either (#2) above or until all job - # slots are filled with slowbadnode entries. - - # One approach to validating this test would be to try to - # determine exactly how many nodes executed, how many didn't, - # and the results of each, and then to assert failure on any - # mismatch (including the total number of built nodes). - # However, while this is possible to do for a single-processor - # system, it is nearly impossible to predict correctly for a - # multi-processor system and still test the characteristics of - # delayed execution nodes. Stated another way, multithreading - # is inherently non-deterministic unless you can completely - # characterize the entire system, and since that's not - # possible here, we shouldn't try. - - # Therefore, this test will simply scan the set of nodes to - # see if the node was executed or not and if it was executed - # that it obtained the expected value for that node - # (i.e. verifying we don't get failure crossovers or - # mislabelling of results). - - for N in testnodes: - state = N.get_state() - self.assertTrue(state in [SCons.Node.no_state, N.expect_to_be], - "Node %s got unexpected result: %s" % (N, state)) - - self.assertTrue([N for N in testnodes if N.get_state()], - "no nodes ran at all.") - - -class SerialTaskTest(_SConsTaskTest): - def runTest(self): - """test serial jobs with actual Taskmaster and Task""" - self._test_seq(1) - - -class ParallelTaskTest(_SConsTaskTest): - def runTest(self): - """test parallel jobs with actual Taskmaster and Task""" - self._test_seq(num_jobs) - - - -#--------------------------------------------------------------------- - -def suite(): - suite = unittest.TestSuite() - suite.addTest(ParallelTestCase()) - suite.addTest(SerialTestCase()) - suite.addTest(NoParallelTestCase()) - suite.addTest(SerialExceptionTestCase()) - suite.addTest(ParallelExceptionTestCase()) - suite.addTest(SerialTaskTest()) - suite.addTest(ParallelTaskTest()) - return suite - -if __name__ == "__main__": - runner = TestUnit.cli.get_runner() - result = runner().run(suite()) - if (len(result.failures) == 0 - and len(result.errors) == 1 - and isinstance(result.errors[0][0], SerialTestCase) - and isinstance(result.errors[0][1][0], NoThreadsException)): - sys.exit(2) - elif not result.wasSuccessful(): - sys.exit(1) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Node/FS.py scons-4.5.2+dfsg/SCons/Node/FS.py --- scons-4.4.0+dfsg/SCons/Node/FS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Node/FS.py 2023-03-21 16:17:04.000000000 +0000 @@ -378,20 +378,33 @@ return x.upper() - class DiskChecker: - def __init__(self, type, do, ignore): - self.type = type - self.do = do - self.ignore = ignore - self.func = do + """ + Implement disk check variation. + + This Class will hold functions to determine what this particular disk + checking implementation should do when enabled or disabled. 
+ """ + def __init__(self, disk_check_type, do_check_function, ignore_check_function): + self.disk_check_type = disk_check_type + self.do_check_function = do_check_function + self.ignore_check_function = ignore_check_function + self.func = do_check_function + def __call__(self, *args, **kw): return self.func(*args, **kw) - def set(self, list): - if self.type in list: - self.func = self.do + + def enable(self, disk_check_type_list): + """ + If the current object's disk_check_type matches any in the list passed + :param disk_check_type_list: List of disk checks to enable + :return: + """ + if self.disk_check_type in disk_check_type_list: + self.func = self.do_check_function else: - self.func = self.ignore + self.func = self.ignore_check_function + def do_diskcheck_match(node, predicate, errorfmt): result = predicate() @@ -409,24 +422,25 @@ if result: raise TypeError(errorfmt % node.get_abspath()) + def ignore_diskcheck_match(node, predicate, errorfmt): pass - diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) diskcheckers = [ diskcheck_match, ] -def set_diskcheck(list): + +def set_diskcheck(enabled_checkers): for dc in diskcheckers: - dc.set(list) + dc.enable(enabled_checkers) -def diskcheck_types(): - return [dc.type for dc in diskcheckers] +def diskcheck_types(): + return [dc.disk_check_type for dc in diskcheckers] class EntryProxy(SCons.Util.Proxy): @@ -1239,7 +1253,7 @@ else: return "" - def chdir(self, dir, change_os_dir=0): + def chdir(self, dir, change_os_dir=False): """Change the current working directory for lookups. If change_os_dir is true, we will also change the "real" cwd to match. @@ -2157,49 +2171,52 @@ for dirname in [n for n in names if isinstance(entries[n], Dir)]: entries[dirname].walk(func, arg) - def glob(self, pathname, ondisk=True, source=False, strings=False, exclude=None): - """ - Returns a list of Nodes (or strings) matching a specified - pathname pattern. + def glob(self, pathname, ondisk=True, source=False, strings=False, exclude=None) -> list: + """Returns a list of Nodes (or strings) matching a pathname pattern. - Pathname patterns follow UNIX shell semantics: * matches - any-length strings of any characters, ? matches any character, - and [] can enclose lists or ranges of characters. Matches do - not span directory separators. + Pathname patterns follow POSIX shell syntax:: - The matches take into account Repositories, returning local - Nodes if a corresponding entry exists in a Repository (either + * matches everything + ? matches any single character + [seq] matches any character in seq (ranges allowed) + [!seq] matches any char not in seq + + The wildcard characters can be escaped by enclosing in brackets. + A leading dot is not matched by a wildcard, and needs to be + explicitly included in the pattern to be matched. Matches also + do not span directory separators. + + The matches take into account Repositories, returning a local + Node if a corresponding entry exists in a Repository (either an in-memory Node or something on disk). - By defafult, the glob() function matches entries that exist - on-disk, in addition to in-memory Nodes. Setting the "ondisk" - argument to False (or some other non-true value) causes the glob() - function to only match in-memory Nodes. The default behavior is - to return both the on-disk and in-memory Nodes. - - The "source" argument, when true, specifies that corresponding - source Nodes must be returned if you're globbing in a build - directory (initialized with VariantDir()). 
The default behavior - is to return Nodes local to the VariantDir(). - - The "strings" argument, when true, returns the matches as strings, - not Nodes. The strings are path names relative to this directory. - - The "exclude" argument, if not None, must be a pattern or a list - of patterns following the same UNIX shell semantics. - Elements matching a least one pattern of this list will be excluded - from the result. - - The underlying algorithm is adapted from the glob.glob() function - in the Python library (but heavily modified), and uses fnmatch() - under the covers. + The underlying algorithm is adapted from a rather old version + of :func:`glob.glob` function in the Python standard library + (heavily modified), and uses :func:`fnmatch.fnmatch` under the covers. + + This is the internal implementation of the external Glob API. + + Args: + pattern: pathname pattern to match. + ondisk: if false, restricts matches to in-memory Nodes. + By defafult, matches entries that exist on-disk in addition + to in-memory Nodes. + source: if true, corresponding source Nodes are returned if + globbing in a variant directory. The default behavior + is to return Nodes local to the variant directory. + strings: if true, returns the matches as strings instead of + Nodes. The strings are path names relative to this directory. + exclude: if not ``None``, must be a pattern or a list of patterns + following the same POSIX shell semantics. Elements matching at + least one pattern from *exclude* will be excluded from the result. + """ dirname, basename = os.path.split(pathname) if not dirname: result = self._glob1(basename, ondisk, source, strings) else: if has_glob_magic(dirname): - list = self.glob(dirname, ondisk, source, False, exclude) + list = self.glob(dirname, ondisk, source, strings=False, exclude=exclude) else: list = [self.Dir(dirname, create=True)] result = [] @@ -2226,7 +2243,8 @@ corresponding entries and returns a Node (or string) relative to the current directory if an entry is found anywhere. - TODO: handle pattern with no wildcard + TODO: handle pattern with no wildcard. Python's glob.glob uses + a separate _glob0 function to do this. """ search_dir_list = self.get_all_rdirs() for srcdir in self.srcdir_list(): @@ -2399,7 +2417,7 @@ return Base.must_be_same(self, klass) - def _lookup_abs(self, p, klass, create=1): + def _lookup_abs(self, p, klass, create=True): """ Fast (?) lookup of a *normalized* absolute path. @@ -2424,7 +2442,7 @@ raise SCons.Errors.UserError(msg) # There is no Node for this path name, and we're allowed # to create it. 
- dir_name, file_name = p.rsplit('/',1) + dir_name, file_name = p.rsplit('/', 1) dir_node = self._lookup_abs(dir_name, Dir) result = klass(file_name, dir_node, self.fs) diff -Nru scons-4.4.0+dfsg/SCons/Node/FSTests.py scons-4.5.2+dfsg/SCons/Node/FSTests.py --- scons-4.4.0+dfsg/SCons/Node/FSTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Node/FSTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -1822,7 +1822,7 @@ test.write(['subdir', 'build'], "subdir/build\n") subdir = fs.Dir('subdir') - fs.chdir(subdir, change_os_dir=1) + fs.chdir(subdir, change_os_dir=True) self.fs._lookup('#build/file', subdir, SCons.Node.FS.File) def test_above_root(self): diff -Nru scons-4.4.0+dfsg/SCons/Node/__init__.py scons-4.5.2+dfsg/SCons/Node/__init__.py --- scons-4.4.0+dfsg/SCons/Node/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Node/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -685,8 +685,8 @@ """Try to retrieve the node's content from a cache This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - built(). + so only do thread safe stuff here. Do thread unsafe stuff + in :meth:`built`. Returns true if the node was successfully retrieved. """ @@ -743,12 +743,12 @@ """Actually build the node. This is called by the Taskmaster after it's decided that the - Node is out-of-date and must be rebuilt, and after the prepare() - method has gotten everything, uh, prepared. + Node is out-of-date and must be rebuilt, and after the + :meth:`prepare` method has gotten everything, uh, prepared. This method is called from multiple threads in a parallel build, so only do thread safe stuff here. Do thread unsafe stuff - in built(). + in :meth:`built`. """ try: diff -Nru scons-4.4.0+dfsg/SCons/Node/Python.py scons-4.5.2+dfsg/SCons/Node/Python.py --- scons-4.4.0+dfsg/SCons/Node/Python.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Node/Python.py 2023-03-21 16:17:04.000000000 +0000 @@ -75,8 +75,13 @@ class Value(SCons.Node.Node): - """A class for Python variables, typically passed on the command line - or generated by a script, but not from a file or some other source. + """A Node class for values represented by Python expressions. + + Values are typically passed on the command line or generated + by a script, but not from a file or some other source. + + .. versionchanged:: 4.0 + the *name* parameter was added. """ NodeInfo = ValueNodeInfo @@ -165,8 +170,10 @@ def ValueWithMemo(value, built_value=None, name=None): - """ - Memoized Value() node factory. + """Memoized :class:`Value` node factory. + + .. versionchanged:: 4.0 + the *name* parameter was added. """ global _memo_lookup_map diff -Nru scons-4.4.0+dfsg/SCons/Platform/darwin.py scons-4.5.2+dfsg/SCons/Platform/darwin.py --- scons-4.4.0+dfsg/SCons/Platform/darwin.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Platform/darwin.py 2023-03-21 16:17:04.000000000 +0000 @@ -31,6 +31,7 @@ from . 
import posix import os + def generate(env): posix.generate(env) env['SHLIBSUFFIX'] = '.dylib' @@ -40,7 +41,7 @@ # env['ENV']['PATH'] = '/opt/local/bin:/opt/local/sbin:' + env['ENV']['PATH'] + ':/sw/bin' # Store extra system paths in env['ENV']['PATHOSX'] - + filelist = ['/etc/paths',] # make sure this works on Macs with Tiger or earlier try: diff -Nru scons-4.4.0+dfsg/SCons/Scanner/Java.py scons-4.5.2+dfsg/SCons/Scanner/Java.py --- scons-4.4.0+dfsg/SCons/Scanner/Java.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Scanner/Java.py 2023-03-21 16:17:04.000000000 +0000 @@ -21,56 +21,65 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -import os +import os import SCons.Node import SCons.Node.FS import SCons.Scanner -import SCons.Util +from SCons.Util import flatten, is_String -def _subst_libs(env, libs): - """ - Substitute environment variables and split into list. +def _subst_paths(env, paths) -> list: + """Return a list of substituted path elements. + + If *paths* is a string, it is split on the search-path separator. + Otherwise, substitution is done on string-valued list elements but + they are not split. + + Note helps support behavior like pulling in the external ``CLASSPATH`` + and setting it directly into ``JAVACLASSPATH``, however splitting on + ``os.pathsep`` makes the interpretation system-specific (this is + warned about in the manpage entry for ``JAVACLASSPATH``). """ - if SCons.Util.is_String(libs): - libs = env.subst(libs) - if SCons.Util.is_String(libs): - libs = libs.split() - elif SCons.Util.is_Sequence(libs): - _libs = [] - for lib in libs: - _libs += _subst_libs(env, lib) - libs = _libs + if is_String(paths): + paths = env.subst(paths) + if SCons.Util.is_String(paths): + paths = paths.split(os.pathsep) else: - # libs is an object (Node, for example) - libs = [libs] - return libs + # TODO: may want to revisit splitting list-element strings if requested + paths = flatten(paths) + paths = [env.subst(path) if is_String(path) else path for path in paths] + return paths -def _collect_classes(list, dirname, files): +def _collect_classes(classlist, dirname, files): for fname in files: - if os.path.splitext(fname)[1] == ".class": - list.append(os.path.join(str(dirname), fname)) + if fname.endswith(".class"): + classlist.append(os.path.join(str(dirname), fname)) -def scan(node, env, libpath=()): - """Scan for files on the JAVACLASSPATH. +def scan(node, env, libpath=()) -> list: + """Scan for files both on JAVACLASSPATH and JAVAPROCESSORPATH. - The classpath can contain: + JAVACLASSPATH/JAVAPROCESSORPATH path can contain: - Explicit paths to JAR/Zip files - Wildcards (*) - Directories which contain classes in an unnamed package - Parent directories of the root package for classes in a named package - Class path entries that are neither directories nor archives (.zip or JAR files) nor the asterisk (*) wildcard character are ignored. - """ - classpath = env.get('JAVACLASSPATH', []) - classpath = _subst_libs(env, classpath) + Class path entries that are neither directories nor archives (.zip + or JAR files) nor the asterisk (*) wildcard character are ignored. 
+ """ + classpath = [] + for var in ['JAVACLASSPATH', 'JAVAPROCESSORPATH']: + classpath += _subst_paths(env, env.get(var, [])) result = [] for path in classpath: - if SCons.Util.is_String(path) and "*" in path: + if is_String(path) and "*" in path: + # This matches more than the Java docs describe: a '*' only + # matches jar files. The filter later should trim this down. + # TODO: should we filter here? use .endswith('*') rather than "in"? libs = env.Glob(path) else: libs = [path] @@ -89,8 +98,11 @@ def JavaScanner(): - return SCons.Scanner.Base(scan, 'JavaScanner', - skeys=['.java']) + """Scanner for .java files. + + .. versionadded:: 4.4 + """ + return SCons.Scanner.Base(scan, 'JavaScanner', skeys=['.java']) # Local Variables: # tab-width:4 diff -Nru scons-4.4.0+dfsg/SCons/Scanner/JavaTests.py scons-4.5.2+dfsg/SCons/Scanner/JavaTests.py --- scons-4.4.0+dfsg/SCons/Scanner/JavaTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Scanner/JavaTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -33,18 +33,25 @@ test = TestCmd.TestCmd(workdir = '') -test.subdir('com') files = [ 'bootclasspath.jar', 'classpath.jar', 'Test.class', - 'com/Test.class' ] for fname in files: test.write(fname, "\n") +test.subdir('com') +test.subdir('java space') +subfiles = [ + 'com/Test.class', + 'java space/Test.class' +] + +for fname in subfiles: + test.write(fname.split('/'), "\n") class DummyEnvironment(collections.UserDict): def __init__(self,**kw): @@ -52,7 +59,7 @@ self.data.update(kw) self.fs = SCons.Node.FS.FS(test.workpath('')) self['ENV'] = {} - + def Dictionary(self, *args): return self.data @@ -80,7 +87,7 @@ def File(self, filename): return self.fs.File(filename) - + def Glob(self, path): return self.fs.Glob(path) @@ -111,8 +118,7 @@ class JavaScannerEmptyClasspath(unittest.TestCase): def runTest(self): path = [] - env = DummyEnvironment(JAVASUFFIXES=['.java'], - JAVACLASSPATH=path) + env = DummyEnvironment(JAVASUFFIXES=['.java'], JAVACLASSPATH=path) s = SCons.Scanner.Java.JavaScanner() deps = s(DummyNode('dummy'), env) expected = [] @@ -145,10 +151,97 @@ JAVACLASSPATH=[test.workpath()]) s = SCons.Scanner.Java.JavaScanner() deps = s(DummyNode('dummy'), env) - expected = ['Test.class', 'com/Test.class'] + expected = ['Test.class', 'com/Test.class', 'java space/Test.class'] + deps_match(self, deps, expected) + + +class JavaScannerNamedDirClasspath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment( + JAVASUFFIXES=['.java'], + JAVACLASSPATH=[test.workpath('com'), test.workpath('java space')], + ) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['com/Test.class', 'java space/Test.class'] deps_match(self, deps, expected) +class JavaScannerSearchPathClasspath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment( + JAVASUFFIXES=['.java'], + JAVACLASSPATH=os.pathsep.join([test.workpath('com'), test.workpath('java space')]), + ) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['com/Test.class', 'java space/Test.class'] + deps_match(self, deps, expected) + + +class JavaScannerEmptyProcessorpath(unittest.TestCase): + def runTest(self): + path = [] + env = DummyEnvironment(JAVASUFFIXES=['.java'], JAVAPROCESSORPATH=path) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = [] + deps_match(self, deps, expected) + + +class JavaScannerProcessorpath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment(JAVASUFFIXES=['.java'], + 
JAVAPROCESSORPATH=[test.workpath('classpath.jar')]) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['classpath.jar'] + deps_match(self, deps, expected) + + +class JavaScannerWildcardProcessorpath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment(JAVASUFFIXES=['.java'], + JAVAPROCESSORPATH=[test.workpath('*')]) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['bootclasspath.jar', 'classpath.jar', 'Test.class'] + deps_match(self, deps, expected) + + +class JavaScannerDirProcessorpath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment(JAVASUFFIXES=['.java'], + JAVAPROCESSORPATH=[test.workpath()]) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['Test.class', 'com/Test.class', 'java space/Test.class'] + deps_match(self, deps, expected) + + +class JavaScannerNamedDirProcessorpath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment( + JAVASUFFIXES=['.java'], + JAVAPROCESSORPATH=[test.workpath('com'), test.workpath('java space')], + ) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['com/Test.class', 'java space/Test.class'] + deps_match(self, deps, expected) + + +class JavaScannerSearchPathProcessorpath(unittest.TestCase): + def runTest(self): + env = DummyEnvironment( + JAVASUFFIXES=['.java'], + JAVAPROCESSORPATH=os.pathsep.join([test.workpath('com'), test.workpath('java space')]), + ) + s = SCons.Scanner.Java.JavaScanner() + deps = s(DummyNode('dummy'), env) + expected = ['com/Test.class', 'java space/Test.class'] + deps_match(self, deps, expected) + if __name__ == "__main__": unittest.main() diff -Nru scons-4.4.0+dfsg/SCons/SConf.py scons-4.5.2+dfsg/SCons/SConf.py --- scons-4.4.0+dfsg/SCons/SConf.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/SConf.py 2023-03-21 16:17:04.000000000 +0000 @@ -43,7 +43,7 @@ import SCons.Action import SCons.Builder import SCons.Errors -import SCons.Job +import SCons.Taskmaster.Job import SCons.Node.FS import SCons.Taskmaster import SCons.Util @@ -227,6 +227,8 @@ This is almost the same as SCons.Script.BuildTask. Handles SConfErrors correctly and knows about the current cache_mode. """ + non_sconf_nodes = set() + def display(self, message): if sconf_global.logstream: sconf_global.logstream.write("scons: Configure: " + message + "\n") @@ -376,6 +378,25 @@ sconsign.set_entry(t.name, sconsign_entry) sconsign.merge() + def make_ready_current(self): + # We're overriding make_ready_current() call to add to the list + # of nodes used by this task, filtering out any nodes created + # by the checker for it's own purpose. + self.non_sconf_nodes.update([t for t in self.targets if not t.is_conftest()]) + super().make_ready_current() + make_ready = make_ready_current + + def postprocess(self): + # We're done executing this task, so now we'll go through all the + # nodes used by this task which aren't nodes created for + # Configure checkers, but rather are existing or built files + # and reset their node info. + # If we do not reset their node info, any changes in these + # nodes will not trigger builds in the normal build process + for node in self.non_sconf_nodes: + node.ninfo = node.new_ninfo() + super().postprocess() + class SConfBase: """This is simply a class to represent a configure context. After creating a SConf object, you can call any tests. 
After finished with your @@ -515,7 +536,7 @@ # the engine assumes the current path is the SConstruct directory ... old_fs_dir = SConfFS.getcwd() old_os_dir = os.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=1) + SConfFS.chdir(SConfFS.Top, change_os_dir=True) # Because we take responsibility here for writing out our # own .sconsign info (see SConfBuildTask.execute(), above), @@ -551,7 +572,7 @@ SConfFS.set_max_drift(0) tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask) # we don't want to build tests in parallel - jobs = SCons.Job.Jobs(1, tm ) + jobs = SCons.Taskmaster.Job.Jobs(1, tm) jobs.run() for n in nodes: state = n.get_state() @@ -562,7 +583,7 @@ finally: SConfFS.set_max_drift(save_max_drift) os.chdir(old_os_dir) - SConfFS.chdir(old_fs_dir, change_os_dir=0) + SConfFS.chdir(old_fs_dir, change_os_dir=False) if self.logstream is not None: # restore stdout / stderr sys.stdout = oldStdout @@ -772,7 +793,7 @@ tb = traceback.extract_stack()[-3-self.depth] old_fs_dir = SConfFS.getcwd() - SConfFS.chdir(SConfFS.Top, change_os_dir=0) + SConfFS.chdir(SConfFS.Top, change_os_dir=False) self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' % (tb[0], tb[1], str(self.confdir)) ) SConfFS.chdir(old_fs_dir) @@ -923,14 +944,20 @@ st, out = self.TryRun(text, ext) return not st, out - def AppendLIBS(self, lib_name_list): + def AppendLIBS(self, lib_name_list, unique=False): oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Append(LIBS = lib_name_list) + if unique: + self.env.AppendUnique(LIBS = lib_name_list) + else: + self.env.Append(LIBS = lib_name_list) return oldLIBS - def PrependLIBS(self, lib_name_list): + def PrependLIBS(self, lib_name_list, unique=False): oldLIBS = self.env.get( 'LIBS', [] ) - self.env.Prepend(LIBS = lib_name_list) + if unique: + self.env.PrependUnique(LIBS = lib_name_list) + else: + self.env.Prepend(LIBS = lib_name_list) return oldLIBS def SetLIBS(self, val): @@ -1067,7 +1094,8 @@ def CheckLib(context, library = None, symbol = "main", - header = None, language = None, autoadd = 1): + header = None, language = None, autoadd=True, + append=True, unique=False) -> bool: """ A test for a library. See also CheckLibWithHeader. Note that library may also be None to test whether the given symbol @@ -1082,15 +1110,16 @@ # ToDo: accept path for the library res = SCons.Conftest.CheckLib(context, library, symbol, header = header, - language = language, autoadd = autoadd) - context.did_show_result = 1 + language = language, autoadd = autoadd, + append=append, unique=unique) + context.did_show_result = True return not res # XXX # Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. def CheckLibWithHeader(context, libs, header, language, - call = None, autoadd = 1): + call = None, autoadd=True, append=True, unique=False) -> bool: # ToDo: accept path for library. Support system header files. """ Another (more sophisticated) test for a library. @@ -1099,8 +1128,7 @@ As in CheckLib, we support library=None, to test if the call compiles without extra link flags. 
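A hedged Configure-context sketch of the new keyword arguments (libm is only an example target):

    env = Environment()
    conf = Configure(env)
    # autoadd: whether to touch env['LIBS'] at all; append: append vs. prepend;
    # unique: use AppendUnique/PrependUnique so an existing entry is not duplicated
    if conf.CheckLibWithHeader('m', 'math.h', 'C',
                               autoadd=True, append=False, unique=True):
        pass  # libm is now prepended to env['LIBS'] at most once
    env = conf.Finish()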
""" - prog_prefix, dummy = \ - createIncludesFromHeaders(header, 0) + prog_prefix, dummy = createIncludesFromHeaders(header, 0) if not libs: libs = [None] @@ -1108,7 +1136,8 @@ libs = [libs] res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, - call = call, language = language, autoadd = autoadd) + call = call, language = language, autoadd=autoadd, + append=append, unique=unique) context.did_show_result = 1 return not res diff -Nru scons-4.4.0+dfsg/SCons/SConfTests.py scons-4.5.2+dfsg/SCons/SConfTests.py --- scons-4.4.0+dfsg/SCons/SConfTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/SConfTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -32,6 +32,7 @@ sys.stdout = io.StringIO() +# a library that is sure to exist on the platform if sys.platform == 'win32': existing_lib = "msvcrt" else: @@ -78,9 +79,13 @@ # - cygwin on Windows (using cmd.exe, not bash) # - posix # - msvc on Windows (hopefully) - if (not self.scons_env.Detect( self.scons_env.subst('$CXX') ) or - not self.scons_env.Detect( self.scons_env.subst('$CC') ) or - not self.scons_env.Detect( self.scons_env.subst('$LINK') )): + if not all( + ( + self.scons_env.Detect(self.scons_env.subst('$CXX')), + self.scons_env.Detect(self.scons_env.subst('$CC')), + self.scons_env.Detect(self.scons_env.subst('$LINK')), + ) + ): raise Exception("This test needs an installed compiler!") if self.scons_env['CXX'] == 'g++': global existing_lib @@ -214,6 +219,8 @@ pass def get_stored_info(self): pass + def is_conftest(self): + return True def get_executor(self): class Executor: def __init__(self, targets): @@ -502,27 +509,59 @@ r = sconf.CheckLib( ["hopefullynolib",existing_lib], "main", autoadd=0 ) assert r, "did not find %s " % existing_lib - # CheckLib() with autoadd def libs(env): return env.get('LIBS', []) - env = sconf.env.Clone() - + # CheckLib() with combinations of autoadd, append try: - r = sconf.CheckLib( existing_lib, "main", autoadd=1 ) + env = sconf.env.Clone() + r = sconf.CheckLib(existing_lib, "main", autoadd=True, append=True) assert r, "did not find main in %s" % existing_lib expect = libs(env) + [existing_lib] got = libs(sconf.env) assert got == expect, "LIBS: expected %s, got %s" % (expect, got) + env = sconf.env.Clone() + r = sconf.CheckLib(existing_lib, "main", autoadd=True, append=False) + assert r, "did not find main in %s" % existing_lib + expect = [existing_lib] + libs(env) + got = libs(sconf.env) + assert got == expect, "LIBS: expected %s, got %s" % (expect, got) + sconf.env = env.Clone() - r = sconf.CheckLib( existing_lib, "main", autoadd=0 ) + r = sconf.CheckLib(existing_lib, "main", autoadd=False) assert r, "did not find main in %s" % existing_lib expect = libs(env) got = libs(sconf.env) assert got == expect, "before and after LIBS were not the same" finally: sconf.env = env + + # CheckLib() with unique + sconf.env.Append(LIBS=existing_lib) + try: + env = sconf.env.Clone() + r = sconf.CheckLib( + existing_lib, "main", autoadd=True, append=True, unique=False + ) + assert r, f"did not find main in {existing_lib}" + + expect = libs(env) + [existing_lib] + got = libs(sconf.env) + assert got == expect, f"LIBS: expected {expect}, got {got}" + + env = sconf.env.Clone() + r = sconf.CheckLib( + existing_lib, "main", autoadd=True, append=True, unique=True + ) + assert r, f"did not find main in {existing_lib}" + + expect = libs(env) + got = libs(sconf.env) + assert got == expect, f"LIBS: expected {expect}, got {got}" + finally: + sconf.env = env + finally: sconf.Finish() @@ -565,21 +604,33 @@ r = 
sconf.CheckLibWithHeader( [existing_lib,"hopefullynolib"], ["stdio.h", "math.h"], "C", autoadd=0 ) assert r, "did not find %s, #include stdio.h first" % existing_lib - # CheckLibWithHeader with autoadd def libs(env): return env.get('LIBS', []) - env = sconf.env.Clone() - + # CheckLibWithHeader with combinations of autoadd, append try: - r = sconf.CheckLibWithHeader( existing_lib, "math.h", "C", autoadd=1 ) + env = sconf.env.Clone() + r = sconf.CheckLibWithHeader( + existing_lib, "math.h", "C", autoadd=True, append=True + ) assert r, "did not find math.h with %s" % existing_lib expect = libs(env) + [existing_lib] got = libs(sconf.env) assert got == expect, "LIBS: expected %s, got %s" % (expect, got) sconf.env = env.Clone() - r = sconf.CheckLibWithHeader( existing_lib, "math.h", "C", autoadd=0 ) + r = sconf.CheckLibWithHeader( + existing_lib, "math.h", "C", autoadd=True, append=False + ) + assert r, "did not find math.h with %s" % existing_lib + expect = [existing_lib] + libs(env) + got = libs(sconf.env) + assert got == expect, "LIBS: expected %s, got %s" % (expect, got) + + sconf.env = env.Clone() + r = sconf.CheckLibWithHeader( + existing_lib, "math.h", "C", autoadd=False + ) assert r, "did not find math.h with %s" % existing_lib expect = libs(env) got = libs(sconf.env) @@ -587,6 +638,29 @@ finally: sconf.env = env + # CheckLibWithHeader() with unique + sconf.env.Append(LIBS=existing_lib) + try: + env = sconf.env.Clone() + r = sconf.CheckLibWithHeader( + existing_lib, "math.h", "C", autoadd=True, append=True, unique=False + ) + assert r, f"did not find main in {existing_lib}" + expect = libs(env) + [existing_lib] + got = libs(sconf.env) + assert got == expect, f"LIBS: expected {expect}, got {got}" + + env = sconf.env.Clone() + r = sconf.CheckLibWithHeader( + existing_lib, "math.h", "C", autoadd=True, append=True, unique=True + ) + assert r, f"did not find main in {existing_lib}" + expect = libs(env) + got = libs(sconf.env) + assert got == expect, f"LIBS: expected {expect}, got {got}" + finally: + sconf.env = env + finally: sconf.Finish() diff -Nru scons-4.4.0+dfsg/SCons/Script/__init__.py scons-4.5.2+dfsg/SCons/Script/__init__.py --- scons-4.4.0+dfsg/SCons/Script/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -107,6 +107,7 @@ PrintHelp = Main.PrintHelp GetOption = Main.GetOption SetOption = Main.SetOption +ValidateOptions = Main.ValidateOptions Progress = Main.Progress GetBuildFailures = Main.GetBuildFailures @@ -125,8 +126,7 @@ #profiling = Main.profiling #repositories = Main.repositories -from . import SConscript -_SConscript = SConscript +from . import SConscript as _SConscript call_stack = _SConscript.call_stack @@ -287,21 +287,25 @@ return SCons.Variables.Variables(files, args) -# The list of global functions to add to the SConscript name space -# that end up calling corresponding methods or Builders in the +# Adding global functions to the SConscript name space. +# +# Static functions that do not trigger initialization of +# DefaultEnvironment() and don't use its state. +EnsureSConsVersion = _SConscript.SConsEnvironment.EnsureSConsVersion +EnsurePythonVersion = _SConscript.SConsEnvironment.EnsurePythonVersion +Exit = _SConscript.SConsEnvironment.Exit +GetLaunchDir = _SConscript.SConsEnvironment.GetLaunchDir +SConscriptChdir = _SConscript.SConsEnvironment.SConscriptChdir + +# Functions that end up calling methods or Builders in the # DefaultEnvironment(). 
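A hedged SConstruct sketch of what the statically-bound functions above allow: the version guards can run before anything instantiates the default environment and its tool detection (the version numbers are only illustrative):

    # none of these calls create DefaultEnvironment() as a side effect
    EnsureSConsVersion(4, 5)
    EnsurePythonVersion(3, 6)
    print("launched from", GetLaunchDir())
    # the first Environment() call below is what triggers tool initialization
    env = Environment()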
GlobalDefaultEnvironmentFunctions = [ # Methods from the SConsEnvironment class, above. 'Default', - 'EnsurePythonVersion', - 'EnsureSConsVersion', - 'Exit', 'Export', - 'GetLaunchDir', 'Help', 'Import', #'SConscript', is handled separately, below. - 'SConscriptChdir', # Methods from the Environment.Base class. 'AddPostAction', @@ -375,6 +379,8 @@ 'Package', ] +# DefaultEnvironmentCall() initializes DefaultEnvironment() if it is not +# created yet. for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders: exec ("%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name))) del name diff -Nru scons-4.4.0+dfsg/SCons/Script/Main.py scons-4.5.2+dfsg/SCons/Script/Main.py --- scons-4.4.0+dfsg/SCons/Script/Main.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/Main.py 2023-03-21 16:17:04.000000000 +0000 @@ -52,7 +52,7 @@ import SCons.Defaults import SCons.Environment import SCons.Errors -import SCons.Job +import SCons.Taskmaster.Job import SCons.Node import SCons.Node.FS import SCons.Platform @@ -492,6 +492,26 @@ def SetOption(name, value): return OptionsParser.values.set_option(name, value) + +def ValidateOptions(throw_exception=False) -> None: + """Validate options passed to SCons on the command line. + + If you call this after you set all your command line options with AddOption(), + it will verify that all command line options are valid. + So if you added an option --xyz and you call SCons with --xyy you can cause + SCons to issue an error message and exit by calling this function. + + :param bool throw_exception: (Optional) Should this function raise an error if there's an invalid option on the command line, or issue a message and exit with error status. + + :raises SConsBadOptionError: If throw_exception is True and there are invalid options on command line. + + .. versionadded:: 4.5.0 + """ + + OptionsParser.raise_exception_on_error = throw_exception + OptionsParser.preserve_unknown_options = False + OptionsParser.parse_args(OptionsParser.largs, OptionsParser.values) + def PrintHelp(file=None): OptionsParser.print_help(file=file) @@ -1114,7 +1134,7 @@ SCons.Node.FS.set_duplicate(options.duplicate) fs.set_max_drift(options.max_drift) - SCons.Job.explicit_stack_size = options.stack_size + SCons.Taskmaster.Job.explicit_stack_size = options.stack_size # Hash format and chunksize are set late to support SetOption being called # in a SConscript or SConstruct file. @@ -1271,23 +1291,12 @@ """Leave the order of dependencies alone.""" return dependencies - def tmtrace_cleanup(tfile): - tfile.close() - - if options.taskmastertrace_file == '-': - tmtrace = sys.stdout - elif options.taskmastertrace_file: - tmtrace = open(options.taskmastertrace_file, 'w') - atexit.register(tmtrace_cleanup, tmtrace) - else: - tmtrace = None - taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, tmtrace) + taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, options.taskmastertrace_file) # Let the BuildTask objects get at the options to respond to the # various print_* settings, tree_printer list, etc. BuildTask.options = options - is_pypy = platform.python_implementation() == 'PyPy' # As of 3.7, python removed support for threadless platforms. # See https://www.python.org/dev/peps/pep-0011/ @@ -1301,7 +1310,7 @@ # to check if python configured with threads. 
global num_jobs num_jobs = options.num_jobs - jobs = SCons.Job.Jobs(num_jobs, taskmaster) + jobs = SCons.Taskmaster.Job.Jobs(num_jobs, taskmaster) if num_jobs > 1: msg = None if jobs.num_jobs == 1 or not python_has_threads: @@ -1364,6 +1373,7 @@ else: _main(parser) + def main(): global OptionsParser global exit_status diff -Nru scons-4.4.0+dfsg/SCons/Script/Main.xml scons-4.5.2+dfsg/SCons/Script/Main.xml --- scons-4.4.0+dfsg/SCons/Script/Main.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/Main.xml 2023-03-21 16:17:04.000000000 +0000 @@ -316,18 +316,22 @@ -This function provides a way to query the value of -options which can be set via the command line or using the -&f-link-SetOption; function. +Query the value of settable options which may have been set +on the command line, or by using the &f-link-SetOption; function. +The value of the option is returned in a type matching how the +option was declared - see the documentation for the +corresponding command line option for information about each specific +option. + name can be an entry from the following table, which shows the corresponding command line arguments that could affect the value. name can be also be the destination variable name from a project-specific option added using the -&f-link-AddOption; function, as long as the addition -happens prior to the &f-GetOption; call in the SConscript files. +&f-link-AddOption; function, as long as that addition has been +processed prior to the &f-GetOption; call in the &SConscript; files. @@ -552,12 +556,6 @@ - - -See the documentation for the -corresponding command line option for information about each specific -option. - @@ -740,6 +738,7 @@ + (name, value) @@ -943,6 +942,68 @@ SetOption('max_drift', 0) + + + + + ([throw_exception=False]) + + + + + Check that all the options specified on the command line are either defined by SCons itself + or defined by calls to &f-link-AddOption;. + + + This function should only be called after the last &f-link-AddOption; call in your &SConscript; + logic. + + + Be aware that some tools call &f-link-AddOption;, if you are getting error messages for arguments + that they add, you will need to ensure that you load those tools before you call &f-ValidateOptions;. + + + If there are any command line options not defined, calling this function will cause SCons to issue an + error message and then exit with an error exit + status. + If the optional throw_exception is True, &f-ValidateOptions; will raise a + SConsBadOptionError + exception. This would allow the calling + &SConscript; logic can catch that exception and handle invalid options itself. + + + + Example: + + + +try: + ValidateOptions(throw_exception=True) +except SConsBadOptionError as e: + print("Parser is SConsOptionParser:%s" % (isinstance(e.parser, SConsOptionParser))) + print("Message is :%s" % e.opt_str) + Exit(3) + + + + This function is useful to force SCons to fail fast before you execute any expensive logic later in your + build logic. + For example if you specify build options via any flags, a simple typo could yield the incorrect build + option throughout your entire build. + + +scons --compilers=mingw (the correct flag is --compiler) + + + Could cause SCons to run configure steps with the incorrect compiler. Costing developer time trying to + track down why the configure logic failed with a compiler which should work. 
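To make the ordering concrete, a hedged SConstruct fragment (the --with-compiler option is invented for this example):

    AddOption('--with-compiler', dest='compiler', type='string', nargs=1,
              action='store', default='gcc',
              help='compiler family to configure for')
    # must come after the last AddOption(): a mistyped --with-compilers on the
    # command line now stops the build immediately instead of being ignored
    ValidateOptions()
    env = Environment(CC=GetOption('compiler'))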
+ + + New in version 4.5.0 + + + + diff -Nru scons-4.4.0+dfsg/SCons/Script/SConscript.py scons-4.5.2+dfsg/SCons/Script/SConscript.py --- scons-4.4.0+dfsg/SCons/Script/SConscript.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/SConscript.py 2023-03-21 16:17:04.000000000 +0000 @@ -57,7 +57,7 @@ global_exports = {} # chdir flag -sconscript_chdir = 1 +sconscript_chdir: bool = True def get_calling_namespaces(): """Return the locals and globals for the function that called @@ -205,7 +205,7 @@ # Change directory to the top of the source # tree to make sure the os's cwd and the cwd of # fs match so we can open the SConscript. - fs.chdir(top, change_os_dir=1) + fs.chdir(top, change_os_dir=True) if f.rexists(): actual = f.rfile() _file_ = open(actual.get_abspath(), "rb") @@ -254,7 +254,7 @@ # fs.chdir(), because we still need to # interpret the stuff within the SConscript file # relative to where we are logically. - fs.chdir(ldir, change_os_dir=0) + fs.chdir(ldir, change_os_dir=False) os.chdir(actual.dir.get_abspath()) # Append the SConscript directory to the beginning @@ -292,7 +292,7 @@ if old_file is not None: call_stack[-1].globals.update({__file__:old_file}) - + else: handle_missing_SConscript(f, kw.get('must_exist', None)) @@ -306,7 +306,7 @@ # There was no local directory, so chdir to the # Repository directory. Like above, we do this # directly. - fs.chdir(frame.prev_dir, change_os_dir=0) + fs.chdir(frame.prev_dir, change_os_dir=False) rdir = frame.prev_dir.rdir() rdir._create() # Make sure there's a directory there. try: @@ -385,12 +385,8 @@ # # Private methods of an SConsEnvironment. # - def _exceeds_version(self, major, minor, v_major, v_minor): - """Return 1 if 'major' and 'minor' are greater than the version - in 'v_major' and 'v_minor', and 0 otherwise.""" - return (major > v_major or (major == v_major and minor > v_minor)) - - def _get_major_minor_revision(self, version_string): + @staticmethod + def _get_major_minor_revision(version_string): """Split a version string into major, minor and (optionally) revision parts. 
@@ -488,14 +484,15 @@ def Default(self, *targets): SCons.Script._Set_Default_Targets(self, targets) - def EnsureSConsVersion(self, major, minor, revision=0): + @staticmethod + def EnsureSConsVersion(major, minor, revision=0): """Exit abnormally if the SCons version is not late enough.""" # split string to avoid replacement during build process if SCons.__version__ == '__' + 'VERSION__': SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning, "EnsureSConsVersion is ignored for development version") return - scons_ver = self._get_major_minor_revision(SCons.__version__) + scons_ver = SConsEnvironment._get_major_minor_revision(SCons.__version__) if scons_ver < (major, minor, revision): if revision: scons_ver_string = '%d.%d.%d' % (major, minor, revision) @@ -505,14 +502,16 @@ (scons_ver_string, SCons.__version__)) sys.exit(2) - def EnsurePythonVersion(self, major, minor): + @staticmethod + def EnsurePythonVersion(major, minor): """Exit abnormally if the Python version is not late enough.""" if sys.version_info < (major, minor): v = sys.version.split()[0] print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v)) sys.exit(2) - def Exit(self, value=0): + @staticmethod + def Exit(value=0): sys.exit(value) def Export(self, *vars, **kw): @@ -520,7 +519,8 @@ global_exports.update(compute_exports(self.Split(var))) global_exports.update(kw) - def GetLaunchDir(self): + @staticmethod + def GetLaunchDir(): global launch_dir return launch_dir @@ -528,6 +528,7 @@ name = self.subst(name) return SCons.Script.Main.GetOption(name) + def Help(self, text, append=False): text = self.subst(text, raw=1) SCons.Script.HelpFunction(text, append=append) @@ -596,7 +597,8 @@ subst_kw['exports'] = exports return _SConscript(self.fs, *files, **subst_kw) - def SConscriptChdir(self, flag): + @staticmethod + def SConscriptChdir(flag: bool) -> None: global sconscript_chdir sconscript_chdir = flag diff -Nru scons-4.4.0+dfsg/SCons/Script/SConscript.xml scons-4.5.2+dfsg/SCons/Script/SConscript.xml --- scons-4.4.0+dfsg/SCons/Script/SConscript.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/SConscript.xml 2023-03-21 16:17:04.000000000 +0000 @@ -84,7 +84,7 @@ - + (major, minor) @@ -107,7 +107,7 @@ - + (major, minor, [revision]) @@ -137,7 +137,7 @@ - + ([value]) @@ -215,7 +215,7 @@ - + () diff -Nru scons-4.4.0+dfsg/SCons/Script/SConsOptions.py scons-4.5.2+dfsg/SCons/Script/SConsOptions.py --- scons-4.4.0+dfsg/SCons/Script/SConsOptions.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Script/SConsOptions.py 2023-03-21 16:17:04.000000000 +0000 @@ -40,7 +40,7 @@ diskcheck_all = SCons.Node.FS.diskcheck_types() -experimental_features = {'warp_speed', 'transporter', 'ninja'} +experimental_features = {'warp_speed', 'transporter', 'ninja', 'tm_v2'} def diskcheck_convert(value): @@ -54,7 +54,10 @@ if v == 'all': result = diskcheck_all elif v == 'none': - result = [] + # Don't use an empty list here as that fails the normal check + # to see if an optparse parser of if parser.argname: + # Changed to ['none'] as diskcheck expects a list value + result = ['none'] elif v in diskcheck_all: result.append(v) else: @@ -65,7 +68,7 @@ class SConsValues(optparse.Values): """ Holder class for uniform access to SCons options, regardless - of whether or not they can be set on the command line or in the + of whether they can be set on the command line or in the SConscript files (using the SetOption() function). 
A SCons option value can originate three different ways: @@ -289,14 +292,36 @@ return result +class SConsBadOptionError(optparse.BadOptionError): + """Exception used to indicate that invalid command line options were specified + + :ivar str opt_str: The offending option specified on command line which is not recognized + :ivar OptionParser parser: The active argument parser + + """ + + def __init__(self, opt_str, parser=None): + self.opt_str = opt_str + self.parser = parser + + def __str__(self): + return _("no such option: %s") % self.opt_str + + class SConsOptionParser(optparse.OptionParser): preserve_unknown_options = False + raise_exception_on_error = False def error(self, msg): - # overridden OptionValueError exception handler - self.print_usage(sys.stderr) - sys.stderr.write("SCons Error: %s\n" % msg) - sys.exit(2) + """ + overridden OptionValueError exception handler + """ + if self.raise_exception_on_error: + raise SConsBadOptionError(msg, self) + else: + self.print_usage(sys.stderr) + sys.stderr.write("SCons Error: %s\n" % msg) + sys.exit(2) def _process_long_opt(self, rargs, values): """ SCons-specific processing of long options. diff -Nru scons-4.4.0+dfsg/SCons/Subst.py scons-4.5.2+dfsg/SCons/Subst.py --- scons-4.4.0+dfsg/SCons/Subst.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Subst.py 2023-03-21 16:17:04.000000000 +0000 @@ -804,7 +804,8 @@ # space characters in the string result from the scons_subst() function. _space_sep = re.compile(r'[\t ]+(?![^{]*})') -def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): + +def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides=False): """Expand a string or list containing construction variable substitutions. @@ -834,6 +835,10 @@ lvars = lvars.copy() lvars.update(d) + # Allow last ditch chance to override lvars + if overrides: + lvars.update(overrides) + # We're (most likely) going to eval() things. If Python doesn't # find a __builtins__ value in the global dictionary used for eval(), # it copies the current global values for you. Avoid this by @@ -882,7 +887,7 @@ return result -def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): +def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None,overrides=False): """Substitute construction variables in a string (or list or other object) and separate the arguments into a command list. @@ -908,6 +913,10 @@ lvars = lvars.copy() lvars.update(d) + # Allow caller to specify last ditch override of lvars + if overrides: + lvars.update(overrides) + # We're (most likely) going to eval() things. If Python doesn't # find a __builtins__ value in the global dictionary used for eval(), # it copies the current global values for you. 
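A hedged sketch of the new keyword, mirroring the unit tests further below (the variable name is arbitrary, and env is assumed to be a construction environment with env['XXX'] set to 'xxx'):

    from SCons.Subst import scons_subst
    result = scons_subst('$XXX', env, gvars=env.Dictionary(),
                         overrides={'XXX': 'yyz'})
    assert result == 'yyz'  # the overrides mapping is applied to lvars last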
Avoid this by diff -Nru scons-4.4.0+dfsg/SCons/SubstTests.py scons-4.5.2+dfsg/SCons/SubstTests.py --- scons-4.4.0+dfsg/SCons/SubstTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/SubstTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -713,6 +713,14 @@ assert cmd_list[0][3] == "call", cmd_list[0][3] assert cmd_list[0][4] == "test", cmd_list[0][4] + + def test_subst_overriding_lvars_overrides(self): + """Test that optional passed arg overrides overrides gvars, and existing lvars.""" + env=DummyEnv({'XXX' : 'xxx'}) + result = scons_subst('$XXX', env, gvars=env.Dictionary(), overrides={'XXX': 'yyz'}) + assert result == 'yyz', result + + class scons_subst_list_TestCase(SubstTestCase): basic_cases = [ @@ -1102,6 +1110,13 @@ result = scons_subst_list('$XXX', env, gvars={'XXX' : 'yyy'}) assert result == [['yyy']], result + def test_subst_list_overriding_lvars_overrides(self): + """Test that optional passed arg overrides overrides gvars, and existing lvars.""" + env = DummyEnv({'XXX':'xxx'}) + result = scons_subst_list('$XXX', env, gvars=env.Dictionary(), overrides={'XXX': 'yyy'}) + assert result == [['yyy']], result + + class scons_subst_once_TestCase(unittest.TestCase): loc = { diff -Nru scons-4.4.0+dfsg/SCons/Taskmaster/__init__.py scons-4.5.2+dfsg/SCons/Taskmaster/__init__.py --- scons-4.4.0+dfsg/SCons/Taskmaster/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Taskmaster/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,1134 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +"""Generic Taskmaster module for the SCons build engine. + +This module contains the primary interface(s) between a wrapping user +interface and the SCons build engine. There are two key classes here: + +Taskmaster + This is the main engine for walking the dependency graph and + calling things to decide what does or doesn't need to be built. + +Task + This is the base class for allowing a wrapping interface to + decide what does or doesn't actually need to be done. The + intention is for a wrapping interface to subclass this as + appropriate for different types of behavior it may need. + + The canonical example is the SCons native Python interface, + which has Task subclasses that handle its specific behavior, + like printing "'foo' is up to date" when a top-level target + doesn't need to be built, and handling the -c option by removing + targets as its "build" action. 
There is also a separate subclass + for suppressing this output when the -q option is used. + + The Taskmaster instantiates a Task object for each (set of) + target(s) that it decides need to be evaluated and/or built. +""" +import io +import sys +from abc import ABC, abstractmethod +from itertools import chain +import logging + +import SCons.Errors +import SCons.Node +import SCons.Warnings +from SCons.Util import DispatchingFormatter + +StateString = SCons.Node.StateString +NODE_NO_STATE = SCons.Node.no_state +NODE_PENDING = SCons.Node.pending +NODE_EXECUTING = SCons.Node.executing +NODE_UP_TO_DATE = SCons.Node.up_to_date +NODE_EXECUTED = SCons.Node.executed +NODE_FAILED = SCons.Node.failed +print_prepare = False # set by option --debug=prepare + +# A subsystem for recording stats about how different Nodes are handled by +# the main Taskmaster loop. There's no external control here (no need for +# a --debug= option); enable it by changing the value of CollectStats. + +CollectStats = None + + +class Stats: + """ + A simple class for holding statistics about the disposition of a + Node by the Taskmaster. If we're collecting statistics, each Node + processed by the Taskmaster gets one of these attached, in which case + the Taskmaster records its decision each time it processes the Node. + (Ideally, that's just once per Node.) + """ + def __init__(self): + """ + Instantiates a Taskmaster.Stats object, initializing all + appropriate counters to zero. + """ + self.considered = 0 + self.already_handled = 0 + self.problem = 0 + self.child_failed = 0 + self.not_built = 0 + self.side_effects = 0 + self.build = 0 + + +StatsNodes = [] + +fmt = "%(considered)3d "\ + "%(already_handled)3d " \ + "%(problem)3d " \ + "%(child_failed)3d " \ + "%(not_built)3d " \ + "%(side_effects)3d " \ + "%(build)3d " + + +def dump_stats(): + for n in sorted(StatsNodes, key=lambda a: str(a)): + print((fmt % n.attributes.stats.__dict__) + str(n)) + + +class Task(ABC): + """ SCons build engine abstract task class. + + This controls the interaction of the actual building of node + and the rest of the engine. + + This is expected to handle all of the normally-customizable + aspects of controlling a build, so any given application + *should* be able to do what it wants by sub-classing this + class and overriding methods as appropriate. If an application + needs to customize something by sub-classing Taskmaster (or + some other build engine class), we should first try to migrate + that functionality into this class. + + Note that it's generally a good idea for sub-classes to call + these methods explicitly to update state, etc., rather than + roll their own interaction with Taskmaster from scratch. + """ + + LOGGER = None + + def __init__(self, tm, targets, top, node): + self.tm = tm + self.targets = targets + self.top = top + self.node = node + self.exc_clear() + + def trace_message(self, node, description='node'): + # This grabs the name of the function which calls trace_message() + method_name=sys._getframe(1).f_code.co_name+"():" + Task.LOGGER.debug('%-15s %s %s' % (method_name, description, self.tm.tm_trace_node(node))) + + def display(self, message): + """ + Hook to allow the calling interface to display a message. + + This hook gets called as part of preparing a task for execution + (that is, a Node to be built). As part of figuring out what Node + should be built next, the actual target list may be altered, + along with a message describing the alteration. 
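A hedged sketch of switching the stats subsystem on, since it has no command-line flag (registering an atexit hook is just one way to get the report printed):

    import atexit
    import SCons.Taskmaster
    SCons.Taskmaster.CollectStats = 1              # any true value enables collection
    atexit.register(SCons.Taskmaster.dump_stats)   # print the per-node counters at exit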
The calling + interface can subclass Task and provide a concrete implementation + of this method to see those messages. + """ + pass + + def prepare(self): + """ + Called just before the task is executed. + + This is mainly intended to give the target Nodes a chance to + unlink underlying files and make all necessary directories before + the Action is actually called to build the targets. + """ + global print_prepare + T = self.tm.trace + if T: + self.trace_message(self.node) + + # Now that it's the appropriate time, give the TaskMaster a + # chance to raise any exceptions it encountered while preparing + # this task. + self.exception_raise() + + if self.tm.message: + self.display(self.tm.message) + self.tm.message = None + + # Let the targets take care of any necessary preparations. + # This includes verifying that all of the necessary sources + # and dependencies exist, removing the target file(s), etc. + # + # As of April 2008, the get_executor().prepare() method makes + # sure that all of the aggregate sources necessary to build this + # Task's target(s) exist in one up-front check. The individual + # target t.prepare() methods check that each target's explicit + # or implicit dependencies exists, and also initialize the + # .sconsign info. + executor = self.targets[0].get_executor() + if executor is None: + return + executor.prepare() + for t in executor.get_action_targets(): + if print_prepare: + print("Preparing target %s..."%t) + for s in t.side_effects: + print("...with side-effect %s..."%s) + t.prepare() + for s in t.side_effects: + if print_prepare: + print("...Preparing side-effect %s..."%s) + s.prepare() + + def get_target(self): + """Fetch the target being built or updated by this task. + """ + return self.node + + @abstractmethod + def needs_execute(self): + return + + def execute(self): + """ + Called to execute the task. + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + prepare(), executed() or failed(). + """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + try: + cached_targets = [] + for t in self.targets: + if not t.retrieve_from_cache(): + break + cached_targets.append(t) + if len(cached_targets) < len(self.targets): + # Remove targets before building. It's possible that we + # partially retrieved targets from the cache, leaving + # them in read-only mode. That might cause the command + # to fail. + # + for t in cached_targets: + try: + t.fs.unlink(t.get_internal_path()) + except (IOError, OSError) as e: + SCons.Warnings.warn(SCons.Warnings.CacheCleanupErrorWarning, + "Failed copying all target files from cache, Error while attempting to remove file %s retrieved from cache: %s" % (t.get_internal_path(), e)) + self.targets[0].build() + else: + for t in cached_targets: + t.cached = 1 + except SystemExit: + exc_value = sys.exc_info()[1] + raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code) + except SCons.Errors.UserError: + raise + except SCons.Errors.BuildError: + raise + except Exception as e: + buildError = SCons.Errors.convert_to_BuildError(e) + buildError.node = self.targets[0] + buildError.exc_info = sys.exc_info() + raise buildError + + def executed_without_callbacks(self): + """ + Called when the task has been successfully executed + and the Taskmaster instance doesn't want to call + the Node's callback methods. 
+ """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + + def executed_with_callbacks(self): + """ + Called when the task has been successfully executed and + the Taskmaster instance wants to call the Node's callback + methods. + + This may have been a do-nothing operation (to preserve build + order), so we must check the node's state before deciding whether + it was "built", in which case we call the appropriate Node method. + In any event, we always call "visited()", which will handle any + post-visit actions that must take place regardless of whether + or not the target was an actual built target or a source Node. + """ + global print_prepare + T = self.tm.trace + if T: + self.trace_message(self.node) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + if not t.cached: + t.push_to_cache() + t.built() + t.visited() + if (not print_prepare and + (not hasattr(self, 'options') or not self.options.debug_includes)): + t.release_target_info() + else: + t.visited() + + executed = executed_with_callbacks + + def failed(self): + """ + Default action when a task fails: stop the build. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + self.fail_stop() + + def fail_stop(self): + """ + Explicit stop-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + # Invoke will_not_build() to clean-up the pending children + # list. + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + # Tell the taskmaster to not start any new tasks + self.tm.stop() + + # We're stopping because of a build failure, but give the + # calling Task class a chance to postprocess() the top-level + # target under which the build failure occurred. + self.targets = [self.tm.current_top] + self.top = 1 + + def fail_continue(self): + """ + Explicit continue-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + def make_ready_all(self): + """ + Marks all targets in a task ready for execution. + + This is used when the interface needs every target Node to be + visited--the canonical example being the "scons -c" option. + """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + self.out_of_date = self.targets[:] + for t in self.targets: + t.disambiguate().set_state(NODE_EXECUTING) + for s in t.side_effects: + # add disambiguate here to mirror the call on targets above + s.disambiguate().set_state(NODE_EXECUTING) + + def make_ready_current(self): + """ + Marks all targets in a task ready for execution if any target + is not current. 
+ + This is the default behavior for building only what's necessary. + """ + global print_prepare + T = self.tm.trace + if T: + T.log_handler.stream.write('\n') # Prefix message with new line. This is a hack + self.trace_message(self.node) + + self.out_of_date = [] + needs_executing = False + for t in self.targets: + try: + t.disambiguate().make_ready() + is_up_to_date = not t.has_builder() or \ + (not t.always_build and t.is_up_to_date()) + except EnvironmentError as e: + raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename) + + if not is_up_to_date: + self.out_of_date.append(t) + needs_executing = True + + if needs_executing: + for t in self.targets: + t.set_state(NODE_EXECUTING) + for s in t.side_effects: + # add disambiguate here to mirror the call on targets in first loop above + s.disambiguate().set_state(NODE_EXECUTING) + else: + for t in self.targets: + # We must invoke visited() to ensure that the node + # information has been computed before allowing the + # parent nodes to execute. (That could occur in a + # parallel build...) + t.visited() + t.set_state(NODE_UP_TO_DATE) + if (not print_prepare and + (not hasattr(self, 'options') or not self.options.debug_includes)): + t.release_target_info() + + make_ready = make_ready_current + + def postprocess(self): + """ + Post-processes a task after it's been executed. + + This examines all the targets just built (or not, we don't care + if the build was successful, or even if there was no build + because everything was up-to-date) to see if they have any + waiting parent Nodes, or Nodes waiting on a common side effect, + that can be put back on the candidates list. + """ + T = self.tm.trace + if T: + self.trace_message(self.node) + + # We may have built multiple targets, some of which may have + # common parents waiting for this build. Count up how many + # targets each parent was waiting for so we can subtract the + # values later, and so we *don't* put waiting side-effect Nodes + # back on the candidates list if the Node is also a waiting + # parent. + + targets = set(self.targets) + + pending_children = self.tm.pending_children + parents = {} + for t in targets: + # A node can only be in the pending_children set if it has + # some waiting_parents. + if t.waiting_parents: + if T: + self.trace_message(t, 'removing') + pending_children.discard(t) + for p in t.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + t.waiting_parents = set() + + for t in targets: + if t.side_effects is not None: + for s in t.side_effects: + if s.get_state() == NODE_EXECUTING: + s.set_state(NODE_NO_STATE) + + # The side-effects may have been transferred to + # NODE_NO_STATE by executed_with{,out}_callbacks, but was + # not taken out of the waiting parents/pending children + # data structures. Check for that now. + if s.get_state() == NODE_NO_STATE and s.waiting_parents: + pending_children.discard(s) + for p in s.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + s.waiting_parents = set() + for p in s.waiting_s_e: + if p.ref_count == 0: + self.tm.candidates.append(p) + + for p, subtract in parents.items(): + p.ref_count = p.ref_count - subtract + if T: + self.trace_message(p, 'adjusted parent ref count') + if p.ref_count == 0: + self.tm.candidates.append(p) + + for t in targets: + t.postprocess() + + # Exception handling subsystem. 
+ # + # Exceptions that occur while walking the DAG or examining Nodes + # must be raised, but must be raised at an appropriate time and in + # a controlled manner so we can, if necessary, recover gracefully, + # possibly write out signature information for Nodes we've updated, + # etc. This is done by having the Taskmaster tell us about the + # exception, and letting + + def exc_info(self): + """ + Returns info about a recorded exception. + """ + return self.exception + + def exc_clear(self): + """ + Clears any recorded exception. + + This also changes the "exception_raise" attribute to point + to the appropriate do-nothing method. + """ + self.exception = (None, None, None) + self.exception_raise = self._no_exception_to_raise + + def exception_set(self, exception=None): + """ + Records an exception to be raised at the appropriate time. + + This also changes the "exception_raise" attribute to point + to the method that will, in fact + """ + if not exception: + exception = sys.exc_info() + self.exception = exception + self.exception_raise = self._exception_raise + + def _no_exception_to_raise(self): + pass + + def _exception_raise(self): + """ + Raises a pending exception that was recorded while getting a + Task ready for execution. + """ + exc = self.exc_info()[:] + try: + exc_type, exc_value, exc_traceback = exc + except ValueError: + exc_type, exc_value = exc # pylint: disable=unbalanced-tuple-unpacking + exc_traceback = None + + # raise exc_type(exc_value).with_traceback(exc_traceback) + if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'): + # If exc_value is an exception, then just reraise + raise exc_value.with_traceback(exc_traceback) + else: + # else we'll create an exception using the value and raise that + raise exc_type(exc_value).with_traceback(exc_traceback) + + + # raise e.__class__, e.__class__(e), sys.exc_info()[2] + # exec("raise exc_type(exc_value).with_traceback(exc_traceback)") + + + +class AlwaysTask(Task): + def needs_execute(self): + """ + Always returns True (indicating this Task should always + be executed). + + Subclasses that need this behavior (as opposed to the default + of only executing Nodes that are out of date w.r.t. their + dependencies) can use this as follows: + + class MyTaskSubclass(SCons.Taskmaster.Task): + needs_execute = SCons.Taskmaster.AlwaysTask.needs_execute + """ + return True + +class OutOfDateTask(Task): + def needs_execute(self): + """ + Returns True (indicating this Task should be executed) if this + Task's target state indicates it needs executing, which has + already been determined by an earlier up-to-date check. + """ + return self.targets[0].get_state() == SCons.Node.executing + + +def find_cycle(stack, visited): + if stack[-1] in visited: + return None + visited.add(stack[-1]) + for n in stack[-1].waiting_parents: + stack.append(n) + if stack[0] == stack[-1]: + return stack + if find_cycle(stack, visited): + return stack + stack.pop() + return None + + +class Taskmaster: + """ + The Taskmaster for walking the dependency DAG. 
+ """ + + def __init__(self, targets=[], tasker=None, order=None, trace=None): + self.original_top = targets + self.top_targets_left = targets[:] + self.top_targets_left.reverse() + self.candidates = [] + if tasker is None: + tasker = OutOfDateTask + self.tasker = tasker + if not order: + order = lambda l: l + self.order = order + self.message = None + self.next_candidate = self.find_next_candidate + self.pending_children = set() + self.trace = False + self.configure_trace(trace) + + def configure_trace(self, trace=None): + """ + This handles the command line option --taskmastertrace= + It can be: + - : output to stdout + : output to a file + False/None : Do not trace + """ + if not trace: + self.trace = False + return + + # TODO: May want to switch format to something like this. + # log_format = ( + # '%(relativeCreated)05dms' + # ':%(relfilename)s' + # ':%(funcName)s' + # '#%(lineno)s' + # ': %(message)s' + # ) + tm_formatter = logging.Formatter('Taskmaster: %(message)s') + if isinstance(trace, io.StringIO): + log_handler = logging.StreamHandler(trace) + elif trace == '-': + log_handler = logging.StreamHandler(sys.stdout) + elif trace: + log_handler = logging.FileHandler(filename=trace) + + logger = logging.getLogger('Taskmaster') + logger.setLevel(level=logging.DEBUG) + logger.addHandler(log_handler) + self.trace = logger + + logger.log_handler = log_handler + + # Now setup Task's logger. + tl = logging.getLogger("Task") + tl.setLevel(level=logging.DEBUG) + tl.addHandler(log_handler) + task_formatter = logging.Formatter('%(name)s.%(message)s') + Task.LOGGER = tl + + self.trace.log_handler = log_handler + + log_handler.setFormatter(DispatchingFormatter( + formatters={ + 'Taskmaster': tm_formatter, + 'Task': task_formatter, + 'Job': task_formatter, + }, + default_formatter=logging.Formatter('%(message)s') + )) + + def find_next_candidate(self): + """ + Returns the next candidate Node for (potential) evaluation. + + The candidate list (really a stack) initially consists of all of + the top-level (command line) targets provided when the Taskmaster + was initialized. While we walk the DAG, visiting Nodes, all the + children that haven't finished processing get pushed on to the + candidate list. Each child can then be popped and examined in + turn for whether *their* children are all up-to-date, in which + case a Task will be created for their actual evaluation and + potential building. + + Here is where we also allow candidate Nodes to alter the list of + Nodes that should be examined. This is used, for example, when + invoking SCons in a source directory. A source directory Node can + return its corresponding build directory Node, essentially saying, + "Hey, you really need to build this thing over here instead." + """ + try: + return self.candidates.pop() + except IndexError: + pass + try: + node = self.top_targets_left.pop() + except IndexError: + return None + self.current_top = node + alt, message = node.alter_targets() + if alt: + self.message = message + self.candidates.append(node) + self.candidates.extend(self.order(alt)) + node = self.candidates.pop() + return node + + def no_next_candidate(self): + """ + Stops Taskmaster processing by not returning a next candidate. + + Note that we have to clean-up the Taskmaster candidate list + because the cycle detection depends on the fact all nodes have + been processed somehow. 
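A hedged sketch of exercising the logging-based trace directly; normal builds select it with the --taskmastertrace command-line option ('-' for stdout, otherwise a file name):

    import io
    import SCons.Taskmaster

    buf = io.StringIO()
    tm = SCons.Taskmaster.Taskmaster(targets=[], trace=buf)
    tm.next_task()   # empty DAG: the walk only logs and returns None
    # buf now holds lines such as "Taskmaster: Looking for a node to evaluate"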
+ """ + while self.candidates: + candidates = self.candidates + self.candidates = [] + self.will_not_build(candidates) + return None + + def _validate_pending_children(self): + """ + Validate the content of the pending_children set. Assert if an + internal error is found. + + This function is used strictly for debugging the taskmaster by + checking that no invariants are violated. It is not used in + normal operation. + + The pending_children set is used to detect cycles in the + dependency graph. We call a "pending child" a child that is + found in the "pending" state when checking the dependencies of + its parent node. + + A pending child can occur when the Taskmaster completes a loop + through a cycle. For example, let's imagine a graph made of + three nodes (A, B and C) making a cycle. The evaluation starts + at node A. The Taskmaster first considers whether node A's + child B is up-to-date. Then, recursively, node B needs to + check whether node C is up-to-date. This leaves us with a + dependency graph looking like:: + + Next candidate \ + \ + Node A (Pending) --> Node B(Pending) --> Node C (NoState) + ^ | + | | + +-------------------------------------+ + + Now, when the Taskmaster examines the Node C's child Node A, + it finds that Node A is in the "pending" state. Therefore, + Node A is a pending child of node C. + + Pending children indicate that the Taskmaster has potentially + loop back through a cycle. We say potentially because it could + also occur when a DAG is evaluated in parallel. For example, + consider the following graph:: + + Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... + | ^ + | | + +----------> Node D (NoState) --------+ + / + Next candidate / + + The Taskmaster first evaluates the nodes A, B, and C and + starts building some children of node C. Assuming, that the + maximum parallel level has not been reached, the Taskmaster + will examine Node D. It will find that Node C is a pending + child of Node D. + + In summary, evaluating a graph with a cycle will always + involve a pending child at one point. A pending child might + indicate either a cycle or a diamond-shaped DAG. Only a + fraction of the nodes ends-up being a "pending child" of + another node. This keeps the pending_children set small in + practice. + + We can differentiate between the two cases if we wait until + the end of the build. At this point, all the pending children + nodes due to a diamond-shaped DAG will have been properly + built (or will have failed to build). But, the pending + children involved in a cycle will still be in the pending + state. + + The taskmaster removes nodes from the pending_children set as + soon as a pending_children node moves out of the pending + state. This also helps to keep the pending_children set small. + """ + + for n in self.pending_children: + assert n.state in (NODE_PENDING, NODE_EXECUTING), \ + (str(n), StateString[n.state]) + assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) + for p in n.waiting_parents: + assert p.ref_count > 0, (str(n), str(p), p.ref_count) + + def tm_trace_node(self, node): + return('<%-10s %-3s %s>' % (StateString[node.get_state()], + node.ref_count, + repr(str(node)))) + + def _find_next_ready_node(self): + """ + Finds the next node that is ready to be built. + + This is *the* main guts of the DAG walk. We loop through the + list of candidates, looking for something that has no un-built + children (i.e., that is a leaf Node or has dependencies that are + all leaf Nodes or up-to-date). 
Candidate Nodes are re-scanned + (both the target Node itself and its sources, which are always + scanned in the context of a given target) to discover implicit + dependencies. A Node that must wait for some children to be + built will be put back on the candidates list after the children + have finished building. A Node that has been put back on the + candidates list in this way may have itself (or its sources) + re-scanned, in order to handle generated header files (e.g.) and + the implicit dependencies therein. + + Note that this method does not do any signature calculation or + up-to-date check itself. All of that is handled by the Task + class. This is purely concerned with the dependency graph walk. + """ + + self.ready_exc = None + + T = self.trace + if T: + T.log_handler.stream.write('\n') # Prefix message with new line. This is a hack + self.trace.debug('Looking for a node to evaluate') + + while True: + node = self.next_candidate() + if node is None: + if T: + self.trace.debug('No candidate anymore.') + return None + + node = node.disambiguate() + state = node.get_state() + + # For debugging only: + # + # try: + # self._validate_pending_children() + # except: + # self.ready_exc = sys.exc_info() + # return node + + if CollectStats: + if not hasattr(node.attributes, 'stats'): + node.attributes.stats = Stats() + StatsNodes.append(node) + S = node.attributes.stats + S.considered = S.considered + 1 + else: + S = None + + if T: + self.trace.debug(' Considering node %s and its children:' % self.tm_trace_node(node)) + + if state == NODE_NO_STATE: + # Mark this node as being on the execution stack: + node.set_state(NODE_PENDING) + elif state > NODE_PENDING: + # Skip this node if it has already been evaluated: + if S: S.already_handled = S.already_handled + 1 + if T: + self.trace.debug(' already handled (executed)') + continue + + executor = node.get_executor() + + try: + children = executor.get_all_children() + except SystemExit: + exc_value = sys.exc_info()[1] + e = SCons.Errors.ExplicitExit(node, exc_value.code) + self.ready_exc = (SCons.Errors.ExplicitExit, e) + if T: + self.trace.debug(' SystemExit') + return node + except Exception as e: + # We had a problem just trying to figure out the + # children (like a child couldn't be linked in to a + # VariantDir, or a Scanner threw something). Arrange to + # raise the exception when the Task is "executed." + self.ready_exc = sys.exc_info() + if S: S.problem = S.problem + 1 + if T: + self.trace.debug(' exception %s while scanning children.' % e) + return node + + children_not_visited = [] + children_pending = set() + children_not_ready = [] + children_failed = False + + for child in chain(executor.get_all_prerequisites(), children): + childstate = child.get_state() + + if T: + self.trace.debug(' ' + self.tm_trace_node(child)) + + if childstate == NODE_NO_STATE: + children_not_visited.append(child) + elif childstate == NODE_PENDING: + children_pending.add(child) + elif childstate == NODE_FAILED: + children_failed = True + + if childstate <= NODE_EXECUTING: + children_not_ready.append(child) + + # These nodes have not even been visited yet. Add + # them to the list so that on some next pass we can + # take a stab at evaluating them (or their children). 
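+            # (Illustrative walk, not part of the original comments: with a
+            # graph like the TaskmasterTests fixture n3 = Node("n3", [n1, n2]),
+            # n3 is considered first, n1 and n2 are pushed here as candidates,
+            # and next_task() then yields n1, n2 and finally n3.)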
+            if children_not_visited:
+                if len(children_not_visited) > 1:
+                    children_not_visited.reverse()
+                self.candidates.extend(self.order(children_not_visited))
+
+            # if T and children_not_visited:
+            #    self.trace.debug(' adding to candidates: %s' % map(str, children_not_visited))
+            #    self.trace.debug(' candidates now: %s\n' % map(str, self.candidates))
+
+            # Skip this node if any of its children have failed.
+            #
+            # This catches the case where we're descending a top-level
+            # target and one of our children failed while trying to be
+            # built by a *previous* descent of an earlier top-level
+            # target.
+            #
+            # It can also occur if a node is reused in multiple
+            # targets: the first descent reaches it through one
+            # target, and a later descent reaches it again through
+            # the other target.
+            #
+            # Note that we can only see failed children here if the
+            # --keep-going flag was used, because without it the build
+            # stops before descending into the other branch.
+            #
+            # Note that even if one of the children fails, we still
+            # add the other children to the list of candidate nodes
+            # so the build keeps going (--keep-going).
+            if children_failed:
+                for n in executor.get_action_targets():
+                    n.set_state(NODE_FAILED)
+
+                if S: S.child_failed = S.child_failed + 1
+                if T:
+                    self.trace.debug('****** %s' % self.tm_trace_node(node))
+                continue
+
+            if children_not_ready:
+                for child in children_not_ready:
+                    # We're waiting on one or more derived targets
+                    # that have not yet finished building.
+                    if S: S.not_built = S.not_built + 1
+
+                    # Add this node to the waiting parents lists of
+                    # anything we're waiting on, with a reference
+                    # count so we can be put back on the list for
+                    # re-evaluation when they've all finished.
+                    node.ref_count = node.ref_count + child.add_to_waiting_parents(node)
+                    if T:
+                        self.trace.debug(' adjusted ref count: %s, child %s' %
+                                         (self.tm_trace_node(node), repr(str(child))))
+
+                if T:
+                    for pc in children_pending:
+                        self.trace.debug(' adding %s to the pending children set' %
+                                         self.tm_trace_node(pc))
+                self.pending_children = self.pending_children | children_pending
+
+                continue
+
+            # Skip this node if it has side-effects that are
+            # currently being built:
+            wait_side_effects = False
+            for se in executor.get_action_side_effects():
+                if se.get_state() == NODE_EXECUTING:
+                    se.add_to_waiting_s_e(node)
+                    wait_side_effects = True
+
+            if wait_side_effects:
+                if S: S.side_effects = S.side_effects + 1
+                continue
+
+            # The default when we've gotten through all of the checks above:
+            # this node is ready to be built.
+            if S: S.build = S.build + 1
+            if T:
+                self.trace.debug('Evaluating %s' % self.tm_trace_node(node))
+
+            # For debugging only:
+            #
+            # try:
+            #     self._validate_pending_children()
+            # except:
+            #     self.ready_exc = sys.exc_info()
+            #     return node
+
+            return node
+
+        return None
+
+    def next_task(self):
+        """
+        Returns the next task to be executed.
+
+        This simply asks for the next Node to be evaluated, and then wraps
+        it in the specific Task subclass with which we were initialized.
+        """
+        node = self._find_next_ready_node()
+
+        if node is None:
+            return None
+
+        executor = node.get_executor()
+        if executor is None:
+            return None
+
+        tlist = executor.get_all_targets()
+
+        task = self.tasker(self, tlist, node in self.original_top, node)
+        try:
+            task.make_ready()
+        except Exception as e:
+            # We had a problem just trying to get this task ready (like
+            # a child couldn't be linked to a VariantDir when deciding
+            # whether this node is current). Arrange to raise the
+            # exception when the Task is "executed."
+            self.ready_exc = sys.exc_info()
+
+        if self.ready_exc:
+            task.exception_set(self.ready_exc)
+
+        self.ready_exc = None
+
+        return task
+
+    def will_not_build(self, nodes, node_func=lambda n: None):
+        """
+        Perform clean-up for nodes that will never be built. Invokes
+        a user-defined function on all of these nodes (including all
+        of their parents).
+        """
+
+        T = self.trace
+
+        pending_children = self.pending_children
+
+        to_visit = set(nodes)
+        pending_children = pending_children - to_visit
+
+        if T:
+            for n in nodes:
+                self.trace.debug(' removing node %s from the pending children set\n' %
+                                 self.tm_trace_node(n))
+        try:
+            while len(to_visit):
+                node = to_visit.pop()
+                node_func(node)
+
+                # Prune recursion by flushing the waiting children
+                # list immediately.
+                parents = node.waiting_parents
+                node.waiting_parents = set()
+
+                to_visit = to_visit | parents
+                pending_children = pending_children - parents
+
+                for p in parents:
+                    p.ref_count = p.ref_count - 1
+                    if T:
+                        self.trace.debug(' removing parent %s from the pending children set\n' %
+                                         self.tm_trace_node(p))
+        except KeyError:
+            # The container to_visit has been emptied.
+            pass
+
+        # We have to stick the pending_children set back into the
+        # taskmaster, because Python 1.5.2 compatibility did not
+        # allow us to use in-place updates.
+        self.pending_children = pending_children
+
+    def stop(self):
+        """
+        Stops the current build completely.
+        """
+        self.next_candidate = self.no_next_candidate
+
+    def cleanup(self):
+        """
+        Check for dependency cycles.
+        """
+        if not self.pending_children:
+            return
+
+        nclist = [(n, find_cycle([n], set())) for n in self.pending_children]
+
+        genuine_cycles = [
+            node for node, cycle in nclist
+            if cycle or node.get_state() != NODE_EXECUTED
+        ]
+        if not genuine_cycles:
+            # All of the "cycles" found were single nodes in EXECUTED state,
+            # which is to say, they really weren't cycles. Just return.
+            return
+
+        desc = 'Found dependency cycle(s):\n'
+        for node, cycle in nclist:
+            if cycle:
+                desc = desc + "  " + " -> ".join(map(str, cycle)) + "\n"
+            else:
+                desc = desc + \
+                    "  Internal Error: no cycle found for node %s (%s) in state %s\n" % \
+                    (node, repr(node), StateString[node.get_state()])
+
+        raise SCons.Errors.UserError(desc)
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
diff -Nru scons-4.4.0+dfsg/SCons/Taskmaster/Job.py scons-4.5.2+dfsg/SCons/Taskmaster/Job.py
--- scons-4.4.0+dfsg/SCons/Taskmaster/Job.py 1970-01-01 00:00:00.000000000 +0000
+++ scons-4.5.2+dfsg/SCons/Taskmaster/Job.py 2023-03-21 16:17:04.000000000 +0000
@@ -0,0 +1,746 @@
+# MIT License
+#
+# Copyright The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +"""Serial and Parallel classes to execute build tasks. + +The Jobs class provides a higher level interface to start, +stop, and wait on jobs. +""" + +import SCons.compat + +import logging +import os +import signal +import sys +import threading + +from enum import Enum + +import SCons.Errors +import SCons.Warnings + + +# The default stack size (in kilobytes) of the threads used to execute +# jobs in parallel. +# +# We use a stack size of 256 kilobytes. The default on some platforms +# is too large and prevents us from creating enough threads to fully +# parallelized the build. For example, the default stack size on linux +# is 8 MBytes. + +explicit_stack_size = None +default_stack_size = 256 + +interrupt_msg = 'Build interrupted.' + +class InterruptState: + def __init__(self): + self.interrupted = False + + def set(self): + self.interrupted = True + + def __call__(self): + return self.interrupted + + +class Jobs: + """An instance of this class initializes N jobs, and provides + methods for starting, stopping, and waiting on all N jobs. + """ + + def __init__(self, num, taskmaster): + """ + Create 'num' jobs using the given taskmaster. + + If 'num' is 1 or less, then a serial job will be used, + otherwise a parallel job with 'num' worker threads will + be used. + + The 'num_jobs' attribute will be set to the actual number of jobs + allocated. If more than one job is requested but the Parallel + class can't do it, it gets reset to 1. Wrapping interfaces that + care should check the value of 'num_jobs' after initialization. + """ + + # Importing GetOption here instead of at top of file to avoid + # circular imports + # pylint: disable=import-outside-toplevel + from SCons.Script import GetOption + + self.job = None + if num > 1: + stack_size = explicit_stack_size + if stack_size is None: + stack_size = default_stack_size + + try: + experimental_option = GetOption('experimental') + if 'tm_v2' in experimental_option: + self.job = NewParallel(taskmaster, num, stack_size) + else: + self.job = LegacyParallel(taskmaster, num, stack_size) + + self.num_jobs = num + except NameError: + pass + if self.job is None: + self.job = Serial(taskmaster) + self.num_jobs = 1 + + def run(self, postfunc=lambda: None): + """Run the jobs. + + postfunc() will be invoked after the jobs has run. It will be + invoked even if the jobs are interrupted by a keyboard + interrupt (well, in fact by a signal such as either SIGINT, + SIGTERM or SIGHUP). The execution of postfunc() is protected + against keyboard interrupts and is guaranteed to run to + completion.""" + self._setup_sig_handler() + try: + self.job.start() + finally: + postfunc() + self._reset_sig_handler() + + def were_interrupted(self): + """Returns whether the jobs were interrupted by a signal.""" + return self.job.interrupted() + + def _setup_sig_handler(self): + """Setup an interrupt handler so that SCons can shutdown cleanly in + various conditions: + + a) SIGINT: Keyboard interrupt + b) SIGTERM: kill or system shutdown + c) SIGHUP: Controlling shell exiting + + We handle all of these cases by stopping the taskmaster. It + turns out that it's very difficult to stop the build process + by throwing asynchronously an exception such as + KeyboardInterrupt. 
For example, the python Condition + variables (threading.Condition) and queues do not seem to be + asynchronous-exception-safe. It would require adding a whole + bunch of try/finally block and except KeyboardInterrupt all + over the place. + + Note also that we have to be careful to handle the case when + SCons forks before executing another process. In that case, we + want the child to exit immediately. + """ + def handler(signum, stack, self=self, parentpid=os.getpid()): + if os.getpid() == parentpid: + self.job.taskmaster.stop() + self.job.interrupted.set() + else: + os._exit(2) # pylint: disable=protected-access + + self.old_sigint = signal.signal(signal.SIGINT, handler) + self.old_sigterm = signal.signal(signal.SIGTERM, handler) + try: + self.old_sighup = signal.signal(signal.SIGHUP, handler) + except AttributeError: + pass + if (self.old_sigint is None) or (self.old_sigterm is None) or \ + (hasattr(self, "old_sighup") and self.old_sighup is None): + msg = "Overwritting previous signal handler which was not installed from Python. " + \ + "Will not be able to reinstate and so will return to default handler." + SCons.Warnings.warn(SCons.Warnings.SConsWarning, msg) + + def _reset_sig_handler(self): + """Restore the signal handlers to their previous state (before the + call to _setup_sig_handler().""" + sigint_to_use = self.old_sigint if self.old_sigint is not None else signal.SIG_DFL + sigterm_to_use = self.old_sigterm if self.old_sigterm is not None else signal.SIG_DFL + signal.signal(signal.SIGINT, sigint_to_use) + signal.signal(signal.SIGTERM, sigterm_to_use) + try: + sigterm_to_use = self.old_sighup if self.old_sighup is not None else signal.SIG_DFL + signal.signal(signal.SIGHUP, sigterm_to_use) + except AttributeError: + pass + + +class Serial: + """This class is used to execute tasks in series, and is more efficient + than Parallel, but is only appropriate for non-parallel builds. Only + one instance of this class should be in existence at a time. + + This class is not thread safe. + """ + + def __init__(self, taskmaster): + """Create a new serial job given a taskmaster. + + The taskmaster's next_task() method should return the next task + that needs to be executed, or None if there are no more tasks. The + taskmaster's executed() method will be called for each task when it + is successfully executed, or failed() will be called if it failed to + execute (e.g. execute() raised an exception).""" + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + + def start(self): + """Start the job. This will begin pulling tasks from the taskmaster + and executing them, and return when there are no more tasks. If a task + fails to execute (i.e. execute() raises an exception), then the job will + stop.""" + + while True: + task = self.taskmaster.next_task() + + if task is None: + break + + try: + task.prepare() + if task.needs_execute(): + task.execute() + except Exception: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except Exception: + task.exception_set() + else: + task.exception_set() + + # Let the failed() callback function arrange for the + # build to stop if that's appropriate. + task.failed() + else: + task.executed() + + task.postprocess() + self.taskmaster.cleanup() + + +# Trap import failure so that everything in the Job module but the +# Parallel class (and its dependent classes) will work if the interpreter +# doesn't support threads. 
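+# (Illustrative note, not part of the upstream comment: if the Parallel
+# classes below never get defined, Jobs.__init__ above still degrades
+# gracefully, because referencing the missing LegacyParallel/NewParallel
+# names raises NameError and the constructor falls back to
+# Serial(taskmaster) with num_jobs reset to 1.)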
+try: + import queue + import threading +except ImportError: + pass +else: + class Worker(threading.Thread): + """A worker thread waits on a task to be posted to its request queue, + dequeues the task, executes it, and posts a tuple including the task + and a boolean indicating whether the task executed successfully. """ + + def __init__(self, requestQueue, resultsQueue, interrupted): + super().__init__() + self.daemon = True + self.requestQueue = requestQueue + self.resultsQueue = resultsQueue + self.interrupted = interrupted + self.start() + + def run(self): + while True: + task = self.requestQueue.get() + + if task is None: + # The "None" value is used as a sentinel by + # ThreadPool.cleanup(). This indicates that there + # are no more tasks, so we should quit. + break + + try: + if self.interrupted(): + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + task.execute() + except Exception: + task.exception_set() + ok = False + else: + ok = True + + self.resultsQueue.put((task, ok)) + + class ThreadPool: + """This class is responsible for spawning and managing worker threads.""" + + def __init__(self, num, stack_size, interrupted): + """Create the request and reply queues, and 'num' worker threads. + + One must specify the stack size of the worker threads. The + stack size is specified in kilobytes. + """ + self.requestQueue = queue.Queue(0) + self.resultsQueue = queue.Queue(0) + + try: + prev_size = threading.stack_size(stack_size * 1024) + except AttributeError as e: + # Only print a warning if the stack size has been + # explicitly set. + if explicit_stack_size is not None: + msg = "Setting stack size is unsupported by this version of Python:\n " + \ + e.args[0] + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + except ValueError as e: + msg = "Setting stack size failed:\n " + str(e) + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + + # Create worker threads + self.workers = [] + for _ in range(num): + worker = Worker(self.requestQueue, self.resultsQueue, interrupted) + self.workers.append(worker) + + if 'prev_size' in locals(): + threading.stack_size(prev_size) + + def put(self, task): + """Put task into request queue.""" + self.requestQueue.put(task) + + def get(self): + """Remove and return a result tuple from the results queue.""" + return self.resultsQueue.get() + + def preparation_failed(self, task): + self.resultsQueue.put((task, False)) + + def cleanup(self): + """ + Shuts down the thread pool, giving each worker thread a + chance to shut down gracefully. + """ + # For each worker thread, put a sentinel "None" value + # on the requestQueue (indicating that there's no work + # to be done) so that each worker thread will get one and + # terminate gracefully. + for _ in self.workers: + self.requestQueue.put(None) + + # Wait for all of the workers to terminate. + # + # If we don't do this, later Python versions (2.4, 2.5) often + # seem to raise exceptions during shutdown. This happens + # in requestQueue.get(), as an assertion failure that + # requestQueue.not_full is notified while not acquired, + # seemingly because the main thread has shut down (or is + # in the process of doing so) while the workers are still + # trying to pull sentinels off the requestQueue. + # + # Normally these terminations should happen fairly quickly, + # but we'll stick a one-second timeout on here just in case + # someone gets hung. 
+ for worker in self.workers: + worker.join(1.0) + self.workers = [] + + class LegacyParallel: + """This class is used to execute tasks in parallel, and is somewhat + less efficient than Serial, but is appropriate for parallel builds. + + This class is thread safe. + """ + + def __init__(self, taskmaster, num, stack_size): + """Create a new parallel job given a taskmaster. + + The taskmaster's next_task() method should return the next + task that needs to be executed, or None if there are no more + tasks. The taskmaster's executed() method will be called + for each task when it is successfully executed, or failed() + will be called if the task failed to execute (i.e. execute() + raised an exception). + + Note: calls to taskmaster are serialized, but calls to + execute() on distinct tasks are not serialized, because + that is the whole point of parallel jobs: they can execute + multiple tasks simultaneously. """ + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + self.tp = ThreadPool(num, stack_size, self.interrupted) + + self.maxjobs = num + + def start(self): + """Start the job. This will begin pulling tasks from the + taskmaster and executing them, and return when there are no + more tasks. If a task fails to execute (i.e. execute() raises + an exception), then the job will stop.""" + + jobs = 0 + + while True: + # Start up as many available tasks as we're + # allowed to. + while jobs < self.maxjobs: + task = self.taskmaster.next_task() + if task is None: + break + + try: + # prepare task for execution + task.prepare() + except Exception: + task.exception_set() + task.failed() + task.postprocess() + else: + if task.needs_execute(): + # dispatch task + self.tp.put(task) + jobs += 1 + else: + task.executed() + task.postprocess() + + if not task and not jobs: + break + + # Let any/all completed tasks finish up before we go + # back and put the next batch of tasks on the queue. + while True: + task, ok = self.tp.get() + jobs -= 1 + + if ok: + task.executed() + else: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except Exception: + task.exception_set() + + # Let the failed() callback function arrange + # for the build to stop if that's appropriate. + task.failed() + + task.postprocess() + + if self.tp.resultsQueue.empty(): + break + + self.tp.cleanup() + self.taskmaster.cleanup() + + # An experimental new parallel scheduler that uses a leaders/followers pattern. + class NewParallel: + + class State(Enum): + READY = 0 + SEARCHING = 1 + STALLED = 2 + COMPLETED = 3 + + class Worker(threading.Thread): + def __init__(self, owner): + super().__init__() + self.daemon = True + self.owner = owner + self.start() + + def run(self): + self.owner._work() + + def __init__(self, taskmaster, num, stack_size): + self.taskmaster = taskmaster + self.num_workers = num + self.stack_size = stack_size + self.interrupted = InterruptState() + self.workers = [] + + # The `tm_lock` is what ensures that we only have one + # thread interacting with the taskmaster at a time. It + # also protects access to our state that gets updated + # concurrently. The `can_search_cv` is associated with + # this mutex. + self.tm_lock = threading.Lock() + + # Guarded under `tm_lock`. + self.jobs = 0 + self.state = NewParallel.State.READY + + # The `can_search_cv` is used to manage a leader / + # follower pattern for access to the taskmaster, and to + # awaken from stalls. 
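+            # (Illustrative summary, not from the original comments: the
+            # worker that wins `tm_lock` moves READY -> SEARCHING and turns
+            # the taskmaster crank; it then either hands the lead to another
+            # searcher via notify() (READY), parks while in-flight jobs
+            # finish (STALLED), or ends the walk and wakes everyone with
+            # notify_all() (COMPLETED).)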
+ self.can_search_cv = threading.Condition(self.tm_lock) + + # The queue of tasks that have completed execution. The + # next thread to obtain `tm_lock`` will retire them. + self.results_queue_lock = threading.Lock() + self.results_queue = [] + + if self.taskmaster.trace: + self.trace = self._setup_logging() + else: + self.trace = False + + def _setup_logging(self): + jl = logging.getLogger("Job") + jl.setLevel(level=logging.DEBUG) + jl.addHandler(self.taskmaster.trace.log_handler) + return jl + + def trace_message(self, message): + # This grabs the name of the function which calls trace_message() + method_name = sys._getframe(1).f_code.co_name + "():" + thread_id=threading.get_ident() + self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message)) + # print('%-15s %s' % (method_name, message)) + + def start(self): + self._start_workers() + for worker in self.workers: + worker.join() + self.workers = [] + self.taskmaster.cleanup() + + def _start_workers(self): + prev_size = self._adjust_stack_size() + for _ in range(self.num_workers): + self.workers.append(NewParallel.Worker(self)) + self._restore_stack_size(prev_size) + + def _adjust_stack_size(self): + try: + prev_size = threading.stack_size(self.stack_size * 1024) + return prev_size + except AttributeError as e: + # Only print a warning if the stack size has been + # explicitly set. + if explicit_stack_size is not None: + msg = "Setting stack size is unsupported by this version of Python:\n " + \ + e.args[0] + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + except ValueError as e: + msg = "Setting stack size failed:\n " + str(e) + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + + return None + + def _restore_stack_size(self, prev_size): + if prev_size is not None: + threading.stack_size(prev_size) + + def _work(self): + + task = None + + while True: + + # Obtain `tm_lock`, granting exclusive access to the taskmaster. + with self.can_search_cv: + + if self.trace: + self.trace_message("Gained exclusive access") + + # Capture whether we got here with `task` set, + # then drop our reference to the task as we are no + # longer interested in the actual object. + completed_task = (task is not None) + task = None + + # We will only have `completed_task` set here if + # we have looped back after executing a task. If + # we have completed a task and find that we are + # stalled, we should speculatively indicate that + # we are no longer stalled by transitioning to the + # 'ready' state which will bypass the condition + # wait so that we immediately process the results + # queue and hopefully light up new + # work. Otherwise, stay stalled, and we will wait + # in the condvar. Some other thread will come back + # here with a completed task. + if self.state == NewParallel.State.STALLED and completed_task: + if self.trace: + self.trace_message("Detected stall with completed task, bypassing wait") + self.state = NewParallel.State.READY + + # Wait until we are neither searching nor stalled. + while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED: + if self.trace: + self.trace_message("Search already in progress, waiting") + self.can_search_cv.wait() + + # If someone set the completed flag, bail. + if self.state == NewParallel.State.COMPLETED: + if self.trace: + self.trace_message("Completion detected, breaking from main loop") + break + + # Set the searching flag to indicate that a thread + # is currently in the critical section for + # taskmaster work. 
+                    #
+                    if self.trace:
+                        self.trace_message("Starting search")
+                    self.state = NewParallel.State.SEARCHING
+
+                    # Bulk acquire the tasks in the results queue
+                    # under the result queue lock, then process them
+                    # all outside that lock. We need to process the
+                    # tasks in the results queue before looking for
+                    # new work because we might be unable to find new
+                    # work if we don't.
+                    results_queue = []
+                    with self.results_queue_lock:
+                        results_queue, self.results_queue = self.results_queue, results_queue
+
+                    if self.trace:
+                        self.trace_message(f"Found {len(results_queue)} completed tasks to process")
+                    for (rtask, rresult) in results_queue:
+                        if rresult:
+                            rtask.executed()
+                        else:
+                            if self.interrupted():
+                                try:
+                                    raise SCons.Errors.BuildError(
+                                        rtask.targets[0], errstr=interrupt_msg)
+                                except Exception:
+                                    rtask.exception_set()
+
+                            # Let the failed() callback function arrange
+                            # for the build to stop if that's appropriate.
+                            rtask.failed()
+
+                        rtask.postprocess()
+                        self.jobs -= 1
+
+                    # We are done with any task objects that were in
+                    # the results queue.
+                    results_queue.clear()
+
+                    # Now, turn the crank on the taskmaster until we
+                    # either run out of tasks, or find a task that
+                    # needs execution. If we run out of tasks, go idle
+                    # until results arrive if jobs are pending, or
+                    # mark the walk as complete if not.
+                    while self.state == NewParallel.State.SEARCHING:
+                        if self.trace:
+                            self.trace_message("Searching for new tasks")
+                        task = self.taskmaster.next_task()
+
+                        if task:
+                            # We found a task. Walk it through the
+                            # task lifecycle. If it does not need
+                            # execution, just complete the task and
+                            # look for the next one. Otherwise,
+                            # indicate that we are no longer searching
+                            # so we can drop out of this loop, execute
+                            # the task outside the lock, and allow
+                            # another thread in to search.
+                            try:
+                                task.prepare()
+                            except Exception:
+                                task.exception_set()
+                                task.failed()
+                                task.postprocess()
+                            else:
+                                if not task.needs_execute():
+                                    if self.trace:
+                                        self.trace_message("Found internal task")
+                                    task.executed()
+                                    task.postprocess()
+                                else:
+                                    self.jobs += 1
+                                    if self.trace:
+                                        self.trace_message("Found task requiring execution")
+                                    self.state = NewParallel.State.READY
+                                    self.can_search_cv.notify()
+
+                        else:
+                            # We failed to find a task, so this thread
+                            # cannot continue turning the taskmaster
+                            # crank. We must exit the loop.
+                            if self.jobs:
+                                # No task was found, but there are
+                                # outstanding jobs executing that
+                                # might unblock new tasks when they
+                                # complete. Transition to the stalled
+                                # state. We do not need a notify,
+                                # because we know there are threads
+                                # outstanding that will re-enter the
+                                # loop.
+                                #
+                                if self.trace:
+                                    self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
+                                self.state = NewParallel.State.STALLED
+                            else:
+                                # We didn't find a task and there are
+                                # no jobs outstanding, so there is
+                                # nothing that will ever return
+                                # results which might unblock new
+                                # tasks. We can conclude that the walk
+                                # is complete. Update our state to
+                                # note completion and awaken anyone
+                                # sleeping on the condvar.
+                                #
+                                if self.trace:
+                                    self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
+                                self.state = NewParallel.State.COMPLETED
+                                self.can_search_cv.notify_all()
+
+                # We no longer hold `tm_lock` here. If we have a task,
+                # we can now execute it. If there are threads waiting
+                # to search, one of them can now begin turning the
+                # taskmaster crank in NewParallel.
+ if task: + if self.trace: + self.trace_message("Executing task") + ok = True + try: + if self.interrupted(): + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + task.execute() + except Exception: + ok = False + task.exception_set() + + # Grab the results queue lock and enqueue the + # executed task and state. The next thread into + # the searching loop will complete the + # postprocessing work under the taskmaster lock. + # + if self.trace: + self.trace_message("Enqueueing executed task results") + with self.results_queue_lock: + self.results_queue.append((task, ok)) + + # Tricky state "fallthrough" here. We are going back + # to the top of the loop, which behaves differently + # depending on whether `task` is set. Do not perturb + # the value of the `task` variable if you add new code + # after this comment. + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Taskmaster/JobTests.py scons-4.5.2+dfsg/SCons/Taskmaster/JobTests.py --- scons-4.4.0+dfsg/SCons/Taskmaster/JobTests.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Taskmaster/JobTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,580 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
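+
+# (Illustrative note, not part of the upstream file: this is a standalone
+# unittest module; it ends with unittest.main(), so the suite can be run
+# directly, e.g. "python SCons/Taskmaster/JobTests.py".)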
+ +import unittest +import random +import math +import os + +import SCons.Taskmaster.Job +from SCons.Script.Main import OptionsParser + + +def get_cpu_nums(): + # Linux, Unix and MacOS: + if hasattr( os, "sysconf" ): + if "SC_NPROCESSORS_ONLN" in os.sysconf_names: + # Linux & Unix: + ncpus = os.sysconf( "SC_NPROCESSORS_ONLN" ) + if isinstance(ncpus, int) and ncpus > 0: + return ncpus + else: # OSX: + return int(os.popen2("sysctl -n hw.ncpu")[1].read() ) + # Windows: + if "NUMBER_OF_PROCESSORS" in os.environ: + ncpus = int(os.environ["NUMBER_OF_PROCESSORS"]) + if ncpus > 0: + return ncpus + return 1 # Default + +# a large number +num_sines = 500 + +# how many parallel jobs to perform for the test +num_jobs = get_cpu_nums()*2 + +# in case we werent able to detect num cpus for this test +# just make a hardcoded suffcient large number, though not future proof +if num_jobs == 2: + num_jobs = 33 + +# how many tasks to perform for the test +num_tasks = num_jobs*5 + +class DummyLock: + """fake lock class to use if threads are not supported""" + def acquire(self): + pass + + def release(self): + pass + +class NoThreadsException(Exception): + """raised by the ParallelTestCase if threads are not supported""" + + def __str__(self): + return "the interpreter doesn't support threads" + +class Task: + """A dummy task class for testing purposes.""" + + def __init__(self, i, taskmaster): + self.i = i + self.taskmaster = taskmaster + self.was_executed = 0 + self.was_prepared = 0 + + def prepare(self): + self.was_prepared = 1 + + def _do_something(self): + pass + + def needs_execute(self): + return True + + def execute(self): + self.taskmaster.test_case.assertTrue(self.was_prepared, + "the task wasn't prepared") + + self.taskmaster.guard.acquire() + self.taskmaster.begin_list.append(self.i) + self.taskmaster.guard.release() + + # while task is executing, represent this in the parallel_list + # and then turn it off + self.taskmaster.parallel_list[self.i] = 1 + self._do_something() + self.taskmaster.parallel_list[self.i] = 0 + + # check if task was executing while another was also executing + for j in range(1, self.taskmaster.num_tasks): + if self.taskmaster.parallel_list[j + 1] == 1: + self.taskmaster.found_parallel = True + break + + self.was_executed = 1 + + self.taskmaster.guard.acquire() + self.taskmaster.end_list.append(self.i) + self.taskmaster.guard.release() + + def executed(self): + self.taskmaster.num_executed = self.taskmaster.num_executed + 1 + + self.taskmaster.test_case.assertTrue(self.was_prepared, + "the task wasn't prepared") + self.taskmaster.test_case.assertTrue(self.was_executed, + "the task wasn't really executed") + self.taskmaster.test_case.assertTrue(isinstance(self, Task), + "the task wasn't really a Task instance") + + def failed(self): + self.taskmaster.num_failed = self.taskmaster.num_failed + 1 + self.taskmaster.stop = 1 + self.taskmaster.test_case.assertTrue(self.was_prepared, + "the task wasn't prepared") + + def postprocess(self): + self.taskmaster.num_postprocessed = self.taskmaster.num_postprocessed + 1 + + def exception_set(self): + pass + +class RandomTask(Task): + def _do_something(self): + # do something that will take some random amount of time: + for i in range(random.randrange(0, 100 + num_sines, 1)): + x = math.sin(i) + time.sleep(0.01) + +class ExceptionTask: + """A dummy task class for testing purposes.""" + + def __init__(self, i, taskmaster): + self.taskmaster = taskmaster + self.was_prepared = 0 + + def prepare(self): + self.was_prepared = 1 + + def 
needs_execute(self): + return True + + def execute(self): + raise Exception + + def executed(self): + self.taskmaster.num_executed = self.taskmaster.num_executed + 1 + + self.taskmaster.test_case.assertTrue(self.was_prepared, + "the task wasn't prepared") + self.taskmaster.test_case.assertTrue(self.was_executed, + "the task wasn't really executed") + self.taskmaster.test_case.assertTrue(self.__class__ is Task, + "the task wasn't really a Task instance") + + def failed(self): + self.taskmaster.num_failed = self.taskmaster.num_failed + 1 + self.taskmaster.stop = 1 + self.taskmaster.test_case.assertTrue(self.was_prepared, + "the task wasn't prepared") + + def postprocess(self): + self.taskmaster.num_postprocessed = self.taskmaster.num_postprocessed + 1 + + def exception_set(self): + self.taskmaster.exception_set() + +class Taskmaster: + """A dummy taskmaster class for testing the job classes.""" + + def __init__(self, n, test_case, Task): + """n is the number of dummy tasks to perform.""" + + self.test_case = test_case + self.stop = None + self.num_tasks = n + self.num_iterated = 0 + self.num_executed = 0 + self.num_failed = 0 + self.num_postprocessed = 0 + self.parallel_list = [0] * (n+1) + self.found_parallel = False + self.Task = Task + + # 'guard' guards 'task_begin_list' and 'task_end_list' + try: + import threading + self.guard = threading.Lock() + except ImportError: + self.guard = DummyLock() + + # keep track of the order tasks are begun in + self.begin_list = [] + + # keep track of the order tasks are completed in + self.end_list = [] + + def next_task(self): + if self.stop or self.all_tasks_are_iterated(): + return None + else: + self.num_iterated = self.num_iterated + 1 + return self.Task(self.num_iterated, self) + + def all_tasks_are_executed(self): + return self.num_executed == self.num_tasks + + def all_tasks_are_iterated(self): + return self.num_iterated == self.num_tasks + + def all_tasks_are_postprocessed(self): + return self.num_postprocessed == self.num_tasks + + def tasks_were_serial(self): + """analyze the task order to see if they were serial""" + return not self.found_parallel + + def exception_set(self): + pass + + def cleanup(self): + pass + + +SaveThreadPool = None +ThreadPoolCallList = [] + + +class JobTestCase(unittest.TestCase): + """ + Setup common items needed for many Job test cases + """ + def setUp(self) -> None: + """ + Simulating real options parser experimental value. + Since we're in a unit test we're actually using FakeOptionParser() + Which has no values and no defaults. + """ + OptionsParser.values.experimental = [] + + +class ParallelTestCase(JobTestCase): + def runTest(self): + """test parallel jobs""" + + try: + import threading + except ImportError: + raise NoThreadsException() + + taskmaster = Taskmaster(num_tasks, self, RandomTask) + jobs = SCons.Taskmaster.Job.Jobs(num_jobs, taskmaster) + jobs.run() + + self.assertTrue(not taskmaster.tasks_were_serial(), + "the tasks were not executed in parallel") + self.assertTrue(taskmaster.all_tasks_are_executed(), + "all the tests were not executed") + self.assertTrue(taskmaster.all_tasks_are_iterated(), + "all the tests were not iterated over") + self.assertTrue(taskmaster.all_tasks_are_postprocessed(), + "all the tests were not postprocessed") + self.assertFalse(taskmaster.num_failed, + "some task(s) failed to execute") + + # Verify that parallel jobs will pull all of the completed tasks + # out of the queue at once, instead of one by one. 
We do this by + # replacing the default ThreadPool class with one that records the + # order in which tasks are put() and get() to/from the pool, and + # which sleeps a little bit before call get() to let the initial + # tasks complete and get their notifications on the resultsQueue. + + class SleepTask(Task): + def _do_something(self): + time.sleep(0.01) + + global SaveThreadPool + SaveThreadPool = SCons.Taskmaster.Job.ThreadPool + + class WaitThreadPool(SaveThreadPool): + def put(self, task): + ThreadPoolCallList.append('put(%s)' % task.i) + return SaveThreadPool.put(self, task) + def get(self): + time.sleep(0.05) + result = SaveThreadPool.get(self) + ThreadPoolCallList.append('get(%s)' % result[0].i) + return result + + SCons.Taskmaster.Job.ThreadPool = WaitThreadPool + + try: + taskmaster = Taskmaster(3, self, SleepTask) + jobs = SCons.Taskmaster.Job.Jobs(2, taskmaster) + jobs.run() + + # The key here is that we get(1) and get(2) from the + # resultsQueue before we put(3), but get(1) and get(2) can + # be in either order depending on how the first two parallel + # tasks get scheduled by the operating system. + expect = [ + ['put(1)', 'put(2)', 'get(1)', 'get(2)', 'put(3)', 'get(3)'], + ['put(1)', 'put(2)', 'get(2)', 'get(1)', 'put(3)', 'get(3)'], + ] + assert ThreadPoolCallList in expect, ThreadPoolCallList + + finally: + SCons.Taskmaster.Job.ThreadPool = SaveThreadPool + +class SerialTestCase(unittest.TestCase): + def runTest(self): + """test a serial job""" + + taskmaster = Taskmaster(num_tasks, self, RandomTask) + jobs = SCons.Taskmaster.Job.Jobs(1, taskmaster) + jobs.run() + + self.assertTrue(taskmaster.tasks_were_serial(), + "the tasks were not executed in series") + self.assertTrue(taskmaster.all_tasks_are_executed(), + "all the tests were not executed") + self.assertTrue(taskmaster.all_tasks_are_iterated(), + "all the tests were not iterated over") + self.assertTrue(taskmaster.all_tasks_are_postprocessed(), + "all the tests were not postprocessed") + self.assertFalse(taskmaster.num_failed, + "some task(s) failed to execute") + + +class NoParallelTestCase(JobTestCase): + + def runTest(self): + """test handling lack of parallel support""" + def NoParallel(tm, num, stack_size): + raise NameError + save_Parallel = SCons.Taskmaster.Job.LegacyParallel + SCons.Taskmaster.Job.LegacyParallel = NoParallel + try: + taskmaster = Taskmaster(num_tasks, self, RandomTask) + jobs = SCons.Taskmaster.Job.Jobs(2, taskmaster) + self.assertTrue(jobs.num_jobs == 1, + "unexpected number of jobs %d" % jobs.num_jobs) + jobs.run() + self.assertTrue(taskmaster.tasks_were_serial(), + "the tasks were not executed in series") + self.assertTrue(taskmaster.all_tasks_are_executed(), + "all the tests were not executed") + self.assertTrue(taskmaster.all_tasks_are_iterated(), + "all the tests were not iterated over") + self.assertTrue(taskmaster.all_tasks_are_postprocessed(), + "all the tests were not postprocessed") + self.assertFalse(taskmaster.num_failed, + "some task(s) failed to execute") + finally: + SCons.Taskmaster.Job.LegacyParallel = save_Parallel + + +class SerialExceptionTestCase(unittest.TestCase): + def runTest(self): + """test a serial job with tasks that raise exceptions""" + + taskmaster = Taskmaster(num_tasks, self, ExceptionTask) + jobs = SCons.Taskmaster.Job.Jobs(1, taskmaster) + jobs.run() + + self.assertFalse(taskmaster.num_executed, + "a task was executed") + self.assertTrue(taskmaster.num_iterated == 1, + "exactly one task should have been iterated") + self.assertTrue(taskmaster.num_failed 
== 1, + "exactly one task should have failed") + self.assertTrue(taskmaster.num_postprocessed == 1, + "exactly one task should have been postprocessed") + + +class ParallelExceptionTestCase(JobTestCase): + + def runTest(self): + """test parallel jobs with tasks that raise exceptions""" + + taskmaster = Taskmaster(num_tasks, self, ExceptionTask) + jobs = SCons.Taskmaster.Job.Jobs(num_jobs, taskmaster) + jobs.run() + + self.assertFalse(taskmaster.num_executed, + "a task was executed") + self.assertTrue(taskmaster.num_iterated >= 1, + "one or more task should have been iterated") + self.assertTrue(taskmaster.num_failed >= 1, + "one or more tasks should have failed") + self.assertTrue(taskmaster.num_postprocessed >= 1, + "one or more tasks should have been postprocessed") + +#--------------------------------------------------------------------- +# Above tested Job object with contrived Task and Taskmaster objects. +# Now test Job object with actual Task and Taskmaster objects. + +import SCons.Taskmaster +import SCons.Node +import time + +class DummyNodeInfo: + def update(self, obj): + pass + +class testnode (SCons.Node.Node): + def __init__(self): + super().__init__() + self.expect_to_be = SCons.Node.executed + self.ninfo = DummyNodeInfo() + +class goodnode (testnode): + def __init__(self): + super().__init__() + self.expect_to_be = SCons.Node.up_to_date + self.ninfo = DummyNodeInfo() + +class slowgoodnode (goodnode): + def prepare(self): + # Delay to allow scheduled Jobs to run while the dispatcher + # sleeps. Keep this short because it affects the time taken + # by this test. + time.sleep(0.15) + goodnode.prepare(self) + +class badnode (goodnode): + def __init__(self): + super().__init__() + self.expect_to_be = SCons.Node.failed + def build(self, **kw): + raise Exception('badnode exception') + +class slowbadnode (badnode): + def build(self, **kw): + # Appears to take a while to build, allowing faster builds to + # overlap. Time duration is not especially important, but if + # it is faster than slowgoodnode then these could complete + # while the scheduler is sleeping. + time.sleep(0.05) + raise Exception('slowbadnode exception') + +class badpreparenode (badnode): + def prepare(self): + raise Exception('badpreparenode exception') + + +class _SConsTaskTest(JobTestCase): + + def _test_seq(self, num_jobs): + for node_seq in [ + [goodnode], + [badnode], + [slowbadnode], + [slowgoodnode], + [badpreparenode], + [goodnode, badnode], + [slowgoodnode, badnode], + [goodnode, slowbadnode], + [goodnode, goodnode, goodnode, slowbadnode], + [goodnode, slowbadnode, badpreparenode, slowgoodnode], + [goodnode, slowbadnode, slowgoodnode, badnode] + ]: + + self._do_test(num_jobs, node_seq) + + def _do_test(self, num_jobs, node_seq): + + testnodes = [] + for tnum in range(num_tasks): + testnodes.append(node_seq[tnum % len(node_seq)]()) + + taskmaster = SCons.Taskmaster.Taskmaster(testnodes, + tasker=SCons.Taskmaster.AlwaysTask) + + jobs = SCons.Taskmaster.Job.Jobs(num_jobs, taskmaster) + + # Exceptions thrown by tasks are not actually propagated to + # this level, but are instead stored in the Taskmaster. + + jobs.run() + + # Now figure out if tests proceeded correctly. The first test + # that fails will shutdown the initiation of subsequent tests, + # but any tests currently queued for execution will still be + # processed, and any tests that completed before the failure + # would have resulted in new tests being queued for execution. 
+ + # Apply the following operational heuristics of Job.py: + # 0) An initial jobset of tasks will be queued before any + # good/bad results are obtained (from "execute" of task in + # thread). + # 1) A goodnode will complete immediately on its thread and + # allow another node to be queued for execution. + # 2) A badnode will complete immediately and suppress any + # subsequent execution queuing, but all currently queued + # tasks will still be processed. + # 3) A slowbadnode will fail later. It will block slots in + # the job queue. Nodes that complete immediately will + # allow other nodes to be queued in their place, and this + # will continue until either (#2) above or until all job + # slots are filled with slowbadnode entries. + + # One approach to validating this test would be to try to + # determine exactly how many nodes executed, how many didn't, + # and the results of each, and then to assert failure on any + # mismatch (including the total number of built nodes). + # However, while this is possible to do for a single-processor + # system, it is nearly impossible to predict correctly for a + # multi-processor system and still test the characteristics of + # delayed execution nodes. Stated another way, multithreading + # is inherently non-deterministic unless you can completely + # characterize the entire system, and since that's not + # possible here, we shouldn't try. + + # Therefore, this test will simply scan the set of nodes to + # see if the node was executed or not and if it was executed + # that it obtained the expected value for that node + # (i.e. verifying we don't get failure crossovers or + # mislabelling of results). + + for N in testnodes: + state = N.get_state() + self.assertTrue(state in [SCons.Node.no_state, N.expect_to_be], + "Node %s got unexpected result: %s" % (N, state)) + + self.assertTrue([N for N in testnodes if N.get_state()], + "no nodes ran at all.") + + +class SerialTaskTest(_SConsTaskTest): + def runTest(self): + """test serial jobs with actual Taskmaster and Task""" + self._test_seq(1) + + +class ParallelTaskTest(_SConsTaskTest): + def runTest(self): + """test parallel jobs with actual Taskmaster and Task""" + self._test_seq(num_jobs) + + # Now run test with NewParallel() instead of LegacyParallel + OptionsParser.values.experimental=['tm_v2'] + self._test_seq(num_jobs) + + + + +#--------------------------------------------------------------------- + +if __name__ == "__main__": + unittest.main() + + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Taskmaster/TaskmasterTests.py scons-4.5.2+dfsg/SCons/Taskmaster/TaskmasterTests.py --- scons-4.4.0+dfsg/SCons/Taskmaster/TaskmasterTests.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Taskmaster/TaskmasterTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,1334 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of 
the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import SCons.compat + +import sys +import unittest + +import SCons.Taskmaster +import SCons.Errors + +import TestCommon + +built_text = None +cache_text = [] +visited_nodes = [] +executed = None +scan_called = 0 + + +class Node: + def __init__(self, name, kids=[], scans=[]): + self.name = name + self.kids = kids + self.scans = scans + self.cached = 0 + self.scanned = 0 + self.scanner = None + self.targets = [self] + self.prerequisites = None + + class Builder: + def targets(self, node): + return node.targets + + self.builder = Builder() + self.bsig = None + self.csig = None + self.state = SCons.Node.no_state + self.prepared = None + self.ref_count = 0 + self.waiting_parents = set() + self.waiting_s_e = set() + self.side_effect = 0 + self.side_effects = [] + self.alttargets = [] + self.postprocessed = None + self._bsig_val = None + self._current_val = 0 + self.always_build = None + + def disambiguate(self): + return self + + def push_to_cache(self): + pass + + def retrieve_from_cache(self): + global cache_text + if self.cached: + cache_text.append(self.name + " retrieved") + return self.cached + + def make_ready(self): + pass + + def prepare(self): + self.prepared = 1 + self.get_binfo() + + def build(self): + global built_text + built_text = self.name + " built" + + def remove(self): + pass + + # The following four methods new_binfo(), del_binfo(), + # get_binfo(), clear() as well as its calls have been added + # to support the cached_execute() test (issue #2720). + # They are full copies (or snippets) of their actual + # counterparts in the Node class... + def new_binfo(self): + binfo = "binfo" + return binfo + + def del_binfo(self): + """Delete the build info from this node.""" + try: + delattr(self, 'binfo') + except AttributeError: + pass + + def get_binfo(self): + """Fetch a node's build information.""" + try: + return self.binfo + except AttributeError: + pass + + binfo = self.new_binfo() + self.binfo = binfo + + return binfo + + def clear(self): + # The del_binfo() call here isn't necessary for normal execution, + # but is for interactive mode, where we might rebuild the same + # target and need to start from scratch. + self.del_binfo() + + def built(self): + global built_text + if not self.cached: + built_text = built_text + " really" + + # Clear the implicit dependency caches of any Nodes + # waiting for this Node to be built. 
+ for parent in self.waiting_parents: + parent.implicit = None + + self.clear() + + def release_target_info(self): + pass + + def has_builder(self): + return self.builder is not None + + def is_derived(self): + return self.has_builder or self.side_effect + + def alter_targets(self): + return self.alttargets, None + + def visited(self): + global visited_nodes + visited_nodes.append(self.name) + + def children(self): + if not self.scanned: + self.scan() + self.scanned = 1 + return self.kids + + def scan(self): + global scan_called + scan_called = scan_called + 1 + self.kids = self.kids + self.scans + self.scans = [] + + def scanner_key(self): + return self.name + + def add_to_waiting_parents(self, node): + wp = self.waiting_parents + if node in wp: + return 0 + wp.add(node) + return 1 + + def get_state(self): + return self.state + + def set_state(self, state): + self.state = state + + def set_bsig(self, bsig): + self.bsig = bsig + + def set_csig(self, csig): + self.csig = csig + + def store_csig(self): + pass + + def store_bsig(self): + pass + + def is_pseudo_derived(self): + pass + + def is_up_to_date(self): + return self._current_val + + def depends_on(self, nodes): + for node in nodes: + if node in self.kids: + return 1 + return 0 + + def __str__(self): + return self.name + + def postprocess(self): + self.postprocessed = 1 + self.waiting_parents = set() + + def get_executor(self): + if not hasattr(self, 'executor'): + class Executor: + def prepare(self): + pass + + def get_action_targets(self): + return self.targets + + def get_all_targets(self): + return self.targets + + def get_all_children(self): + result = [] + for node in self.targets: + result.extend(node.children()) + return result + + def get_all_prerequisites(self): + return [] + + def get_action_side_effects(self): + return [] + + self.executor = Executor() + self.executor.targets = self.targets + return self.executor + + def get_internal_path(self): + """ + Should only be used (currently) by TaskmasterTestCase.test_cached_execute_target_unlink_fails + """ + return str(self) + + +class OtherError(Exception): + pass + + +class MyException(Exception): + pass + + +class TaskmasterTestCase(unittest.TestCase): + + def test_next_task(self): + """Test fetching the next task + """ + global built_text + + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster([n1, n1]) + t = tm.next_task() + t.prepare() + t.execute() + t = tm.next_task() + assert t is None + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + + tm = SCons.Taskmaster.Taskmaster([n3]) + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n1 built", built_text + t.executed() + t.postprocess() + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n2 built", built_text + t.executed() + t.postprocess() + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n3 built", built_text + t.executed() + t.postprocess() + + assert tm.next_task() is None + + built_text = "up to date: " + top_node = n3 + + class MyTask(SCons.Taskmaster.AlwaysTask): + def execute(self): + global built_text + if self.targets[0].get_state() == SCons.Node.up_to_date: + if self.top: + built_text = self.targets[0].name + " up-to-date top" + else: + built_text = self.targets[0].name + " up-to-date" + else: + self.targets[0].build() + + n1.set_state(SCons.Node.no_state) + n1._current_val = 1 + n2.set_state(SCons.Node.no_state) + n2._current_val = 1 + n3.set_state(SCons.Node.no_state) + n3._current_val = 1 + tm = 
SCons.Taskmaster.Taskmaster(targets=[n3], tasker=MyTask) + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n1 up-to-date", built_text + t.executed() + t.postprocess() + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n2 up-to-date", built_text + t.executed() + t.postprocess() + + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n3 up-to-date top", built_text + t.executed() + t.postprocess() + + assert tm.next_task() is None + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + n4 = Node("n4") + n5 = Node("n5", [n3, n4]) + tm = SCons.Taskmaster.Taskmaster([n5]) + + t1 = tm.next_task() + assert t1.get_target() == n1 + + t2 = tm.next_task() + assert t2.get_target() == n2 + + t4 = tm.next_task() + assert t4.get_target() == n4 + t4.executed() + t4.postprocess() + + t1.executed() + t1.postprocess() + t2.executed() + t2.postprocess() + t3 = tm.next_task() + assert t3.get_target() == n3 + + t3.executed() + t3.postprocess() + t5 = tm.next_task() + assert t5.get_target() == n5, t5.get_target() + t5.executed() + t5.postprocess() + + assert tm.next_task() is None + + n4 = Node("n4") + n4.set_state(SCons.Node.executed) + tm = SCons.Taskmaster.Taskmaster([n4]) + assert tm.next_task() is None + + n1 = Node("n1") + n2 = Node("n2", [n1]) + tm = SCons.Taskmaster.Taskmaster([n2, n2]) + t = tm.next_task() + t.executed() + t.postprocess() + t = tm.next_task() + assert tm.next_task() is None + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1], [n2]) + tm = SCons.Taskmaster.Taskmaster([n3]) + t = tm.next_task() + target = t.get_target() + assert target == n1, target + t.executed() + t.postprocess() + t = tm.next_task() + target = t.get_target() + assert target == n2, target + t.executed() + t.postprocess() + t = tm.next_task() + target = t.get_target() + assert target == n3, target + t.executed() + t.postprocess() + assert tm.next_task() is None + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + n4 = Node("n4", [n3]) + n5 = Node("n5", [n3]) + global scan_called + scan_called = 0 + tm = SCons.Taskmaster.Taskmaster([n4]) + t = tm.next_task() + assert t.get_target() == n1 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n2 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n3 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n4 + t.executed() + t.postprocess() + assert tm.next_task() is None + assert scan_called == 4, scan_called + + tm = SCons.Taskmaster.Taskmaster([n5]) + t = tm.next_task() + assert t.get_target() == n5, t.get_target() + t.executed() + assert tm.next_task() is None + assert scan_called == 5, scan_called + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3") + n4 = Node("n4", [n1, n2, n3]) + n5 = Node("n5", [n4]) + n3.side_effect = 1 + n1.side_effects = n2.side_effects = n3.side_effects = [n4] + tm = SCons.Taskmaster.Taskmaster([n1, n2, n3, n4, n5]) + t = tm.next_task() + assert t.get_target() == n1 + assert n4.state == SCons.Node.executing, n4.state + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n2 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n3 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n4 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n5 + assert not tm.next_task() + t.executed() + t.postprocess() + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3") 
+ n4 = Node("n4", [n1, n2, n3]) + + def reverse(dependencies): + dependencies.reverse() + return dependencies + + tm = SCons.Taskmaster.Taskmaster([n4], order=reverse) + t = tm.next_task() + assert t.get_target() == n3, t.get_target() + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n2, t.get_target() + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n1, t.get_target() + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n4, t.get_target() + t.executed() + t.postprocess() + + n5 = Node("n5") + n6 = Node("n6") + n7 = Node("n7") + n6.alttargets = [n7] + + tm = SCons.Taskmaster.Taskmaster([n5]) + t = tm.next_task() + assert t.get_target() == n5 + t.executed() + t.postprocess() + + tm = SCons.Taskmaster.Taskmaster([n6]) + t = tm.next_task() + assert t.get_target() == n7 + t.executed() + t.postprocess() + t = tm.next_task() + assert t.get_target() == n6 + t.executed() + t.postprocess() + + n1 = Node("n1") + n2 = Node("n2", [n1]) + n1.set_state(SCons.Node.failed) + tm = SCons.Taskmaster.Taskmaster([n2]) + assert tm.next_task() is None + + n1 = Node("n1") + n2 = Node("n2") + n1.targets = [n1, n2] + n1._current_val = 1 + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + t.executed() + t.postprocess() + + s = n1.get_state() + assert s == SCons.Node.executed, s + s = n2.get_state() + assert s == SCons.Node.executed, s + + def test_make_ready_out_of_date(self): + """Test the Task.make_ready() method's list of out-of-date Nodes + """ + ood = [] + + def TaskGen(tm, targets, top, node, ood=ood): + class MyTask(SCons.Taskmaster.AlwaysTask): + def make_ready(self): + SCons.Taskmaster.Task.make_ready(self) + self.ood.extend(self.out_of_date) + + t = MyTask(tm, targets, top, node) + t.ood = ood + return t + + n1 = Node("n1") + c2 = Node("c2") + c2._current_val = 1 + n3 = Node("n3") + c4 = Node("c4") + c4._current_val = 1 + a5 = Node("a5") + a5._current_val = 1 + a5.always_build = 1 + tm = SCons.Taskmaster.Taskmaster(targets=[n1, c2, n3, c4, a5], + tasker=TaskGen) + + del ood[:] + t = tm.next_task() + assert ood == [n1], ood + + del ood[:] + t = tm.next_task() + assert ood == [], ood + + del ood[:] + t = tm.next_task() + assert ood == [n3], ood + + del ood[:] + t = tm.next_task() + assert ood == [], ood + + del ood[:] + t = tm.next_task() + assert ood == [a5], ood + + def test_make_ready_exception(self): + """Test handling exceptions from Task.make_ready() + """ + + class MyTask(SCons.Taskmaster.AlwaysTask): + def make_ready(self): + raise MyException("from make_ready()") + + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster(targets=[n1], tasker=MyTask) + t = tm.next_task() + exc_type, exc_value, exc_tb = t.exception + assert exc_type == MyException, repr(exc_type) + assert str(exc_value) == "from make_ready()", exc_value + + def test_needs_execute(self): + """Test that we can't instantiate a Task subclass without needs_execute + + We should be getting: + TypeError: Can't instantiate abstract class MyTask with abstract methods needs_execute + """ + + class MyTask(SCons.Taskmaster.Task): + pass + + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster(targets=[n1], tasker=MyTask) + with self.assertRaises(TypeError): + _ = tm.next_task() + + def test_make_ready_all(self): + """Test the make_ready_all() method""" + + class MyTask(SCons.Taskmaster.AlwaysTask): + make_ready = SCons.Taskmaster.Task.make_ready_all + + n1 = Node("n1") + c2 = Node("c2") + c2._current_val = 1 + n3 = Node("n3") + c4 = Node("c4") + 
c4._current_val = 1 + + tm = SCons.Taskmaster.Taskmaster(targets=[n1, c2, n3, c4]) + + t = tm.next_task() + target = t.get_target() + assert target is n1, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + target = t.get_target() + assert target is c2, target + assert target.state == SCons.Node.up_to_date, target.state + t = tm.next_task() + target = t.get_target() + assert target is n3, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + target = t.get_target() + assert target is c4, target + assert target.state == SCons.Node.up_to_date, target.state + t = tm.next_task() + assert t is None + + n1 = Node("n1") + c2 = Node("c2") + n3 = Node("n3") + c4 = Node("c4") + + tm = SCons.Taskmaster.Taskmaster(targets=[n1, c2, n3, c4], + tasker=MyTask) + + t = tm.next_task() + target = t.get_target() + assert target is n1, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + target = t.get_target() + assert target is c2, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + target = t.get_target() + assert target is n3, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + target = t.get_target() + assert target is c4, target + assert target.state == SCons.Node.executing, target.state + t = tm.next_task() + assert t is None + + def test_children_errors(self): + """Test errors when fetching the children of a node. + """ + + class StopNode(Node): + def children(self): + raise SCons.Errors.StopError("stop!") + + class ExitNode(Node): + def children(self): + sys.exit(77) + + n1 = StopNode("n1") + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + exc_type, exc_value, exc_tb = t.exception + assert exc_type == SCons.Errors.StopError, repr(exc_type) + assert str(exc_value) == "stop!", exc_value + + n2 = ExitNode("n2") + tm = SCons.Taskmaster.Taskmaster([n2]) + t = tm.next_task() + exc_type, exc_value = t.exception + assert exc_type == SCons.Errors.ExplicitExit, repr(exc_type) + assert exc_value.node == n2, exc_value.node + assert exc_value.status == 77, exc_value.status + + def test_cycle_detection(self): + """Test detecting dependency cycles + """ + n1 = Node("n1") + n2 = Node("n2", [n1]) + n3 = Node("n3", [n2]) + n1.kids = [n3] + + tm = SCons.Taskmaster.Taskmaster([n3]) + try: + t = tm.next_task() + except SCons.Errors.UserError as e: + assert str(e) == "Dependency cycle: n3 -> n1 -> n2 -> n3", str(e) + else: + assert 'Did not catch expected UserError' + + def test_next_top_level_candidate(self): + """Test the next_top_level_candidate() method + """ + n1 = Node("n1") + n2 = Node("n2", [n1]) + n3 = Node("n3", [n2]) + + tm = SCons.Taskmaster.Taskmaster([n3]) + t = tm.next_task() + assert t.targets == [n1], t.targets + t.fail_stop() + assert t.targets == [n3], list(map(str, t.targets)) + assert t.top == 1, t.top + + def test_stop(self): + """Test the stop() method + + Both default and overridden in a subclass. 
+ """ + global built_text + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + + tm = SCons.Taskmaster.Taskmaster([n3]) + t = tm.next_task() + t.prepare() + t.execute() + assert built_text == "n1 built", built_text + t.executed() + t.postprocess() + assert built_text == "n1 built really", built_text + + tm.stop() + assert tm.next_task() is None + + class MyTM(SCons.Taskmaster.Taskmaster): + def stop(self): + global built_text + built_text = "MyTM.stop()" + SCons.Taskmaster.Taskmaster.stop(self) + + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + + built_text = None + tm = MyTM([n3]) + tm.next_task().execute() + assert built_text == "n1 built" + + tm.stop() + assert built_text == "MyTM.stop()" + assert tm.next_task() is None + + def test_executed(self): + """Test when a task has been executed + """ + global built_text + global visited_nodes + + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + built_text = "xxx" + visited_nodes = [] + n1.set_state(SCons.Node.executing) + + t.executed() + + s = n1.get_state() + assert s == SCons.Node.executed, s + assert built_text == "xxx really", built_text + assert visited_nodes == ['n1'], visited_nodes + + n2 = Node("n2") + tm = SCons.Taskmaster.Taskmaster([n2]) + t = tm.next_task() + built_text = "should_not_change" + visited_nodes = [] + n2.set_state(None) + + t.executed() + + s = n2.get_state() + assert s is None, s + assert built_text == "should_not_change", built_text + assert visited_nodes == ['n2'], visited_nodes + + n3 = Node("n3") + n4 = Node("n4") + n3.targets = [n3, n4] + tm = SCons.Taskmaster.Taskmaster([n3]) + t = tm.next_task() + visited_nodes = [] + n3.set_state(SCons.Node.up_to_date) + n4.set_state(SCons.Node.executing) + + t.executed() + + s = n3.get_state() + assert s == SCons.Node.up_to_date, s + s = n4.get_state() + assert s == SCons.Node.executed, s + assert visited_nodes == ['n3', 'n4'], visited_nodes + + def test_prepare(self): + """Test preparation of multiple Nodes for a task + """ + n1 = Node("n1") + n2 = Node("n2") + tm = SCons.Taskmaster.Taskmaster([n1, n2]) + t = tm.next_task() + # This next line is moderately bogus. We're just reaching + # in and setting the targets for this task to an array. The + # "right" way to do this would be to have the next_task() call + # set it up by having something that approximates a real Builder + # return this list--but that's more work than is probably + # warranted right now. + n1.get_executor().targets = [n1, n2] + t.prepare() + assert n1.prepared + assert n2.prepared + + n3 = Node("n3") + n4 = Node("n4") + tm = SCons.Taskmaster.Taskmaster([n3, n4]) + t = tm.next_task() + # More bogus reaching in and setting the targets. + n3.set_state(SCons.Node.up_to_date) + n3.get_executor().targets = [n3, n4] + t.prepare() + assert n3.prepared + assert n4.prepared + + # If the Node has had an exception recorded while it was getting + # prepared, then prepare() should raise that exception. 
+ class MyException(Exception): + pass + + built_text = None + n5 = Node("n5") + tm = SCons.Taskmaster.Taskmaster([n5]) + t = tm.next_task() + t.exception_set((MyException, "exception value")) + exc_caught = None + exc_actually_caught = None + exc_value = None + try: + t.prepare() + except MyException as e: + exc_caught = 1 + exc_value = e + except Exception as exc_actually_caught: + pass + assert exc_caught, "did not catch expected MyException: %s" % exc_actually_caught + assert str(exc_value) == "exception value", exc_value + assert built_text is None, built_text + + # Regression test, make sure we prepare not only + # all targets, but their side effects as well. + n6 = Node("n6") + n7 = Node("n7") + n8 = Node("n8") + n9 = Node("n9") + n10 = Node("n10") + + n6.side_effects = [n8] + n7.side_effects = [n9, n10] + + tm = SCons.Taskmaster.Taskmaster([n6, n7]) + t = tm.next_task() + # More bogus reaching in and setting the targets. + n6.get_executor().targets = [n6, n7] + t.prepare() + assert n6.prepared + assert n7.prepared + assert n8.prepared + assert n9.prepared + assert n10.prepared + + # Make sure we call an Executor's prepare() method. + class ExceptionExecutor: + def prepare(self): + raise Exception("Executor.prepare() exception") + + def get_all_targets(self): + return self.nodes + + def get_all_children(self): + result = [] + for node in self.nodes: + result.extend(node.children()) + return result + + def get_all_prerequisites(self): + return [] + + def get_action_side_effects(self): + return [] + + n11 = Node("n11") + n11.executor = ExceptionExecutor() + n11.executor.nodes = [n11] + tm = SCons.Taskmaster.Taskmaster([n11]) + t = tm.next_task() + try: + t.prepare() + except Exception as e: + assert str(e) == "Executor.prepare() exception", e + else: + raise AssertionError("did not catch expected exception") + + def test_execute(self): + """Test executing a task + """ + global built_text + global cache_text + + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + t.execute() + assert built_text == "n1 built", built_text + + def raise_UserError(): + raise SCons.Errors.UserError + + n2 = Node("n2") + n2.build = raise_UserError + tm = SCons.Taskmaster.Taskmaster([n2]) + t = tm.next_task() + try: + t.execute() + except SCons.Errors.UserError: + pass + else: + self.fail("did not catch expected UserError") + + def raise_BuildError(): + raise SCons.Errors.BuildError + + n3 = Node("n3") + n3.build = raise_BuildError + tm = SCons.Taskmaster.Taskmaster([n3]) + t = tm.next_task() + try: + t.execute() + except SCons.Errors.BuildError: + pass + else: + self.fail("did not catch expected BuildError") + + # On a generic (non-BuildError) exception from a Builder, + # the target should throw a BuildError exception with the + # args set to the exception value, instance, and traceback. + def raise_OtherError(): + raise OtherError + + n4 = Node("n4") + n4.build = raise_OtherError + tm = SCons.Taskmaster.Taskmaster([n4]) + t = tm.next_task() + try: + t.execute() + except SCons.Errors.BuildError as e: + assert e.node == n4, e.node + assert e.errstr == "OtherError : ", e.errstr + assert len(e.exc_info) == 3, e.exc_info + exc_traceback = sys.exc_info()[2] + assert isinstance(e.exc_info[2], type(exc_traceback)), e.exc_info[2] + else: + self.fail("did not catch expected BuildError") + + built_text = None + cache_text = [] + n5 = Node("n5") + n6 = Node("n6") + n6.cached = 1 + tm = SCons.Taskmaster.Taskmaster([n5]) + t = tm.next_task() + # This next line is moderately bogus. 
We're just reaching + # in and setting the targets for this task to an array. The + # "right" way to do this would be to have the next_task() call + # set it up by having something that approximates a real Builder + # return this list--but that's more work than is probably + # warranted right now. + t.targets = [n5, n6] + t.execute() + assert built_text == "n5 built", built_text + assert cache_text == [], cache_text + + built_text = None + cache_text = [] + n7 = Node("n7") + n8 = Node("n8") + n7.cached = 1 + n8.cached = 1 + tm = SCons.Taskmaster.Taskmaster([n7]) + t = tm.next_task() + # This next line is moderately bogus. We're just reaching + # in and setting the targets for this task to an array. The + # "right" way to do this would be to have the next_task() call + # set it up by having something that approximates a real Builder + # return this list--but that's more work than is probably + # warranted right now. + t.targets = [n7, n8] + t.execute() + assert built_text is None, built_text + assert cache_text == ["n7 retrieved", "n8 retrieved"], cache_text + + def test_cached_execute(self): + """Test executing a task with cached targets + """ + # In issue #2720 Alexei Klimkin detected that the previous + # workflow for execute() led to problems in a multithreaded build. + # We have: + # task.prepare() + # task.execute() + # task.executed() + # -> node.visited() + # for the Serial flow, but + # - Parallel - - Worker - + # task.prepare() + # requestQueue.put(task) + # task = requestQueue.get() + # task.execute() + # resultQueue.put(task) + # task = resultQueue.get() + # task.executed() + # ->node.visited() + # in parallel. Since execute() used to call built() when a target + # was cached, it could unblock dependent nodes before the binfo got + # restored again in visited(). This resulted in spurious + # "file not found" build errors, because files fetched from cache would + # be seen as not up to date and wouldn't be scanned for implicit + # dependencies. + # + # The following test ensures that execute() only marks targets as cached, + # but the actual call to built() happens in executed() only. + # Like this, the binfo should still be intact after calling execute()... + global cache_text + + n1 = Node("n1") + # Mark the node as being cached + n1.cached = True + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + t.prepare() + t.execute() + assert cache_text == ["n1 retrieved"], cache_text + # If no binfo exists anymore, something has gone wrong... 
+ has_binfo = hasattr(n1, 'binfo') + assert has_binfo, has_binfo + + def test_cached_execute_target_unlink_fails(self): + """Test executing a task with cached targets where unlinking one of the targets fails + """ + global cache_text + import SCons.Warnings + + cache_text = [] + n1 = Node("n1") + n2 = Node("not-cached") + + class DummyFS: + def unlink(self, _): + raise IOError + + n1.fs = DummyFS() + + # Mark the node as being cached + n1.cached = True + # Add n2 as a target for n1 + n1.targets.append(n2) + # Explicitly mark n2 as not cached + n2.cached = False + + # Save SCons.Warnings.warn so we can mock it and catch it being called for unlink failures + _save_warn = SCons.Warnings.warn + issued_warnings = [] + + def fake_warnings_warn(clz, message): + nonlocal issued_warnings + issued_warnings.append((clz, message)) + SCons.Warnings.warn = fake_warnings_warn + + tm = SCons.Taskmaster.Taskmaster([n1, n2]) + t = tm.next_task() + t.prepare() + t.execute() + + # Restore saved warn + SCons.Warnings.warn = _save_warn + + self.assertTrue(len(issued_warnings) == 1, + msg='More than expected warnings (1) were issued %d' % len(issued_warnings)) + self.assertEqual(issued_warnings[0][0], SCons.Warnings.CacheCleanupErrorWarning, + msg='Incorrect warning class') + self.assertEqual(issued_warnings[0][1], + 'Failed copying all target files from cache, Error while attempting to remove file n1 retrieved from cache: ') + self.assertEqual(cache_text, ["n1 retrieved"], msg=cache_text) + + + def test_exception(self): + """Test generic Taskmaster exception handling + + """ + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster([n1]) + t = tm.next_task() + + t.exception_set((1, 2)) + exc_type, exc_value = t.exception + assert exc_type == 1, exc_type + assert exc_value == 2, exc_value + + t.exception_set(3) + assert t.exception == 3 + + try: + 1 // 0 + except: + # Moved from below + t.exception_set(None) + # pass + + # Having this here works for python 2.x, + # but it is a tuple (None, None, None) when called outside + # an except statement + # t.exception_set(None) + + exc_type, exc_value, exc_tb = t.exception + assert exc_type is ZeroDivisionError, "Expecting ZeroDivisionError got:%s" % exc_type + exception_values = [ + "integer division or modulo", + "integer division or modulo by zero", + "integer division by zero", # PyPy2 + ] + assert str(exc_value) in exception_values, exc_value + + class Exception1(Exception): + pass + + # Previously value was None, but while PY2 None = "", in Py3 None != "", so set to "" + t.exception_set((Exception1, "")) + try: + t.exception_raise() + except: + exc_type, exc_value = sys.exc_info()[:2] + assert exc_type == Exception1, exc_type + assert str(exc_value) == '', "Expecting empty string got:%s (type %s)" % (exc_value, type(exc_value)) + else: + assert 0, "did not catch expected exception" + + class Exception2(Exception): + pass + + t.exception_set((Exception2, "xyzzy")) + try: + t.exception_raise() + except: + exc_type, exc_value = sys.exc_info()[:2] + assert exc_type == Exception2, exc_type + assert str(exc_value) == "xyzzy", exc_value + else: + assert 0, "did not catch expected exception" + + class Exception3(Exception): + pass + + try: + 1 // 0 + except: + tb = sys.exc_info()[2] + t.exception_set((Exception3, "arg", tb)) + try: + t.exception_raise() + except: + exc_type, exc_value, exc_tb = sys.exc_info() + assert exc_type == Exception3, exc_type + assert str(exc_value) == "arg", exc_value + import traceback + x = traceback.extract_tb(tb)[-1] + y =
traceback.extract_tb(exc_tb)[-1] + assert x == y, "x = %s, y = %s" % (x, y) + else: + assert 0, "did not catch expected exception" + + def test_postprocess(self): + """Test postprocessing targets to give them a chance to clean up + """ + n1 = Node("n1") + tm = SCons.Taskmaster.Taskmaster([n1]) + + t = tm.next_task() + assert not n1.postprocessed + t.postprocess() + assert n1.postprocessed + + n2 = Node("n2") + n3 = Node("n3") + tm = SCons.Taskmaster.Taskmaster([n2, n3]) + + assert not n2.postprocessed + assert not n3.postprocessed + t = tm.next_task() + t.postprocess() + assert n2.postprocessed + assert not n3.postprocessed + t = tm.next_task() + t.postprocess() + assert n2.postprocessed + assert n3.postprocessed + + def test_trace(self): + """Test Taskmaster tracing + """ + import io + + trace = io.StringIO() + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3", [n1, n2]) + tm = SCons.Taskmaster.Taskmaster([n1, n1, n3], trace=trace) + t = tm.next_task() + t.prepare() + t.execute() + t.postprocess() + n1.set_state(SCons.Node.executed) + t = tm.next_task() + t.prepare() + t.execute() + t.postprocess() + n2.set_state(SCons.Node.executed) + t = tm.next_task() + t.prepare() + t.execute() + t.postprocess() + t = tm.next_task() + assert t is None + + value = trace.getvalue() + expect = """\ + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.postprocess(): node + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: already handled (executed) +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: adjusted ref count: , child 'n2' +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.postprocess(): node + +Taskmaster: Looking for a node to evaluate +Taskmaster: No candidate anymore. +""" + + if value != expect: + TestCommon.TestCommon.detailed_diff(value, expect) + + assert value == expect, "Expected taskmaster trace contents didn't match. 
See above" + + +if __name__ == "__main__": + unittest.main() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Taskmaster.py scons-4.5.2+dfsg/SCons/Taskmaster.py --- scons-4.4.0+dfsg/SCons/Taskmaster.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Taskmaster.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1059 +0,0 @@ -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -"""Generic Taskmaster module for the SCons build engine. - -This module contains the primary interface(s) between a wrapping user -interface and the SCons build engine. There are two key classes here: - -Taskmaster - This is the main engine for walking the dependency graph and - calling things to decide what does or doesn't need to be built. - -Task - This is the base class for allowing a wrapping interface to - decide what does or doesn't actually need to be done. The - intention is for a wrapping interface to subclass this as - appropriate for different types of behavior it may need. - - The canonical example is the SCons native Python interface, - which has Task subclasses that handle its specific behavior, - like printing "'foo' is up to date" when a top-level target - doesn't need to be built, and handling the -c option by removing - targets as its "build" action. There is also a separate subclass - for suppressing this output when the -q option is used. - - The Taskmaster instantiates a Task object for each (set of) - target(s) that it decides need to be evaluated and/or built. -""" - -import sys -from abc import ABC, abstractmethod -from itertools import chain - -import SCons.Errors -import SCons.Node -import SCons.Warnings - -StateString = SCons.Node.StateString -NODE_NO_STATE = SCons.Node.no_state -NODE_PENDING = SCons.Node.pending -NODE_EXECUTING = SCons.Node.executing -NODE_UP_TO_DATE = SCons.Node.up_to_date -NODE_EXECUTED = SCons.Node.executed -NODE_FAILED = SCons.Node.failed - -print_prepare = False # set by option --debug=prepare - -# A subsystem for recording stats about how different Nodes are handled by -# the main Taskmaster loop. There's no external control here (no need for -# a --debug= option); enable it by changing the value of CollectStats. - -CollectStats = None - -class Stats: - """ - A simple class for holding statistics about the disposition of a - Node by the Taskmaster. 
If we're collecting statistics, each Node - processed by the Taskmaster gets one of these attached, in which case - the Taskmaster records its decision each time it processes the Node. - (Ideally, that's just once per Node.) - """ - def __init__(self): - """ - Instantiates a Taskmaster.Stats object, initializing all - appropriate counters to zero. - """ - self.considered = 0 - self.already_handled = 0 - self.problem = 0 - self.child_failed = 0 - self.not_built = 0 - self.side_effects = 0 - self.build = 0 - -StatsNodes = [] - -fmt = "%(considered)3d "\ - "%(already_handled)3d " \ - "%(problem)3d " \ - "%(child_failed)3d " \ - "%(not_built)3d " \ - "%(side_effects)3d " \ - "%(build)3d " - -def dump_stats(): - for n in sorted(StatsNodes, key=lambda a: str(a)): - print((fmt % n.attributes.stats.__dict__) + str(n)) - - -class Task(ABC): - """ SCons build engine abstract task class. - - This controls the interaction of the actual building of node - and the rest of the engine. - - This is expected to handle all of the normally-customizable - aspects of controlling a build, so any given application - *should* be able to do what it wants by sub-classing this - class and overriding methods as appropriate. If an application - needs to customize something by sub-classing Taskmaster (or - some other build engine class), we should first try to migrate - that functionality into this class. - - Note that it's generally a good idea for sub-classes to call - these methods explicitly to update state, etc., rather than - roll their own interaction with Taskmaster from scratch. - """ - def __init__(self, tm, targets, top, node): - self.tm = tm - self.targets = targets - self.top = top - self.node = node - self.exc_clear() - - def trace_message(self, method, node, description='node'): - fmt = '%-20s %s %s\n' - return fmt % (method + ':', description, self.tm.trace_node(node)) - - def display(self, message): - """ - Hook to allow the calling interface to display a message. - - This hook gets called as part of preparing a task for execution - (that is, a Node to be built). As part of figuring out what Node - should be built next, the actual target list may be altered, - along with a message describing the alteration. The calling - interface can subclass Task and provide a concrete implementation - of this method to see those messages. - """ - pass - - def prepare(self): - """ - Called just before the task is executed. - - This is mainly intended to give the target Nodes a chance to - unlink underlying files and make all necessary directories before - the Action is actually called to build the targets. - """ - global print_prepare - T = self.tm.trace - if T: T.write(self.trace_message('Task.prepare()', self.node)) - - # Now that it's the appropriate time, give the TaskMaster a - # chance to raise any exceptions it encountered while preparing - # this task. - self.exception_raise() - - if self.tm.message: - self.display(self.tm.message) - self.tm.message = None - - # Let the targets take care of any necessary preparations. - # This includes verifying that all of the necessary sources - # and dependencies exist, removing the target file(s), etc. - # - # As of April 2008, the get_executor().prepare() method makes - # sure that all of the aggregate sources necessary to build this - # Task's target(s) exist in one up-front check. The individual - # target t.prepare() methods check that each target's explicit - # or implicit dependencies exists, and also initialize the - # .sconsign info. 
- executor = self.targets[0].get_executor() - if executor is None: - return - executor.prepare() - for t in executor.get_action_targets(): - if print_prepare: - print("Preparing target %s..."%t) - for s in t.side_effects: - print("...with side-effect %s..."%s) - t.prepare() - for s in t.side_effects: - if print_prepare: - print("...Preparing side-effect %s..."%s) - s.prepare() - - def get_target(self): - """Fetch the target being built or updated by this task. - """ - return self.node - - @abstractmethod - def needs_execute(self): - return - - def execute(self): - """ - Called to execute the task. - - This method is called from multiple threads in a parallel build, - so only do thread safe stuff here. Do thread unsafe stuff in - prepare(), executed() or failed(). - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.execute()', self.node)) - - try: - cached_targets = [] - for t in self.targets: - if not t.retrieve_from_cache(): - break - cached_targets.append(t) - if len(cached_targets) < len(self.targets): - # Remove targets before building. It's possible that we - # partially retrieved targets from the cache, leaving - # them in read-only mode. That might cause the command - # to fail. - # - for t in cached_targets: - try: - t.fs.unlink(t.get_internal_path()) - except (IOError, OSError): - pass - self.targets[0].build() - else: - for t in cached_targets: - t.cached = 1 - except SystemExit: - exc_value = sys.exc_info()[1] - raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code) - except SCons.Errors.UserError: - raise - except SCons.Errors.BuildError: - raise - except Exception as e: - buildError = SCons.Errors.convert_to_BuildError(e) - buildError.node = self.targets[0] - buildError.exc_info = sys.exc_info() - raise buildError - - def executed_without_callbacks(self): - """ - Called when the task has been successfully executed - and the Taskmaster instance doesn't want to call - the Node's callback methods. - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.executed_without_callbacks()', - self.node)) - - for t in self.targets: - if t.get_state() == NODE_EXECUTING: - for side_effect in t.side_effects: - side_effect.set_state(NODE_NO_STATE) - t.set_state(NODE_EXECUTED) - - def executed_with_callbacks(self): - """ - Called when the task has been successfully executed and - the Taskmaster instance wants to call the Node's callback - methods. - - This may have been a do-nothing operation (to preserve build - order), so we must check the node's state before deciding whether - it was "built", in which case we call the appropriate Node method. - In any event, we always call "visited()", which will handle any - post-visit actions that must take place regardless of whether - or not the target was an actual built target or a source Node. - """ - global print_prepare - T = self.tm.trace - if T: T.write(self.trace_message('Task.executed_with_callbacks()', - self.node)) - - for t in self.targets: - if t.get_state() == NODE_EXECUTING: - for side_effect in t.side_effects: - side_effect.set_state(NODE_NO_STATE) - t.set_state(NODE_EXECUTED) - if not t.cached: - t.push_to_cache() - t.built() - t.visited() - if (not print_prepare and - (not hasattr(self, 'options') or not self.options.debug_includes)): - t.release_target_info() - else: - t.visited() - - executed = executed_with_callbacks - - def failed(self): - """ - Default action when a task fails: stop the build. 
- - Note: Although this function is normally invoked on nodes in - the executing state, it might also be invoked on up-to-date - nodes when using Configure(). - """ - self.fail_stop() - - def fail_stop(self): - """ - Explicit stop-the-build failure. - - This sets failure status on the target nodes and all of - their dependent parent nodes. - - Note: Although this function is normally invoked on nodes in - the executing state, it might also be invoked on up-to-date - nodes when using Configure(). - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.failed_stop()', self.node)) - - # Invoke will_not_build() to clean-up the pending children - # list. - self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) - - # Tell the taskmaster to not start any new tasks - self.tm.stop() - - # We're stopping because of a build failure, but give the - # calling Task class a chance to postprocess() the top-level - # target under which the build failure occurred. - self.targets = [self.tm.current_top] - self.top = 1 - - def fail_continue(self): - """ - Explicit continue-the-build failure. - - This sets failure status on the target nodes and all of - their dependent parent nodes. - - Note: Although this function is normally invoked on nodes in - the executing state, it might also be invoked on up-to-date - nodes when using Configure(). - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.failed_continue()', self.node)) - - self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) - - def make_ready_all(self): - """ - Marks all targets in a task ready for execution. - - This is used when the interface needs every target Node to be - visited--the canonical example being the "scons -c" option. - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.make_ready_all()', self.node)) - - self.out_of_date = self.targets[:] - for t in self.targets: - t.disambiguate().set_state(NODE_EXECUTING) - for s in t.side_effects: - # add disambiguate here to mirror the call on targets above - s.disambiguate().set_state(NODE_EXECUTING) - - def make_ready_current(self): - """ - Marks all targets in a task ready for execution if any target - is not current. - - This is the default behavior for building only what's necessary. - """ - global print_prepare - T = self.tm.trace - if T: T.write(self.trace_message('Task.make_ready_current()', - self.node)) - - self.out_of_date = [] - needs_executing = False - for t in self.targets: - try: - t.disambiguate().make_ready() - is_up_to_date = not t.has_builder() or \ - (not t.always_build and t.is_up_to_date()) - except EnvironmentError as e: - raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename) - - if not is_up_to_date: - self.out_of_date.append(t) - needs_executing = True - - if needs_executing: - for t in self.targets: - t.set_state(NODE_EXECUTING) - for s in t.side_effects: - # add disambiguate here to mirror the call on targets in first loop above - s.disambiguate().set_state(NODE_EXECUTING) - else: - for t in self.targets: - # We must invoke visited() to ensure that the node - # information has been computed before allowing the - # parent nodes to execute. (That could occur in a - # parallel build...) - t.visited() - t.set_state(NODE_UP_TO_DATE) - if (not print_prepare and - (not hasattr(self, 'options') or not self.options.debug_includes)): - t.release_target_info() - - make_ready = make_ready_current - - def postprocess(self): - """ - Post-processes a task after it's been executed. 
- - This examines all the targets just built (or not, we don't care - if the build was successful, or even if there was no build - because everything was up-to-date) to see if they have any - waiting parent Nodes, or Nodes waiting on a common side effect, - that can be put back on the candidates list. - """ - T = self.tm.trace - if T: T.write(self.trace_message('Task.postprocess()', self.node)) - - # We may have built multiple targets, some of which may have - # common parents waiting for this build. Count up how many - # targets each parent was waiting for so we can subtract the - # values later, and so we *don't* put waiting side-effect Nodes - # back on the candidates list if the Node is also a waiting - # parent. - - targets = set(self.targets) - - pending_children = self.tm.pending_children - parents = {} - for t in targets: - # A node can only be in the pending_children set if it has - # some waiting_parents. - if t.waiting_parents: - if T: T.write(self.trace_message('Task.postprocess()', - t, - 'removing')) - pending_children.discard(t) - for p in t.waiting_parents: - parents[p] = parents.get(p, 0) + 1 - t.waiting_parents = set() - - for t in targets: - if t.side_effects is not None: - for s in t.side_effects: - if s.get_state() == NODE_EXECUTING: - s.set_state(NODE_NO_STATE) - - # The side-effects may have been transferred to - # NODE_NO_STATE by executed_with{,out}_callbacks, but was - # not taken out of the waiting parents/pending children - # data structures. Check for that now. - if s.get_state() == NODE_NO_STATE and s.waiting_parents: - pending_children.discard(s) - for p in s.waiting_parents: - parents[p] = parents.get(p, 0) + 1 - s.waiting_parents = set() - for p in s.waiting_s_e: - if p.ref_count == 0: - self.tm.candidates.append(p) - - for p, subtract in parents.items(): - p.ref_count = p.ref_count - subtract - if T: T.write(self.trace_message('Task.postprocess()', - p, - 'adjusted parent ref count')) - if p.ref_count == 0: - self.tm.candidates.append(p) - - for t in targets: - t.postprocess() - - # Exception handling subsystem. - # - # Exceptions that occur while walking the DAG or examining Nodes - # must be raised, but must be raised at an appropriate time and in - # a controlled manner so we can, if necessary, recover gracefully, - # possibly write out signature information for Nodes we've updated, - # etc. This is done by having the Taskmaster tell us about the - # exception, and letting - - def exc_info(self): - """ - Returns info about a recorded exception. - """ - return self.exception - - def exc_clear(self): - """ - Clears any recorded exception. - - This also changes the "exception_raise" attribute to point - to the appropriate do-nothing method. - """ - self.exception = (None, None, None) - self.exception_raise = self._no_exception_to_raise - - def exception_set(self, exception=None): - """ - Records an exception to be raised at the appropriate time. - - This also changes the "exception_raise" attribute to point - to the method that will, in fact - """ - if not exception: - exception = sys.exc_info() - self.exception = exception - self.exception_raise = self._exception_raise - - def _no_exception_to_raise(self): - pass - - def _exception_raise(self): - """ - Raises a pending exception that was recorded while getting a - Task ready for execution. 
- """ - exc = self.exc_info()[:] - try: - exc_type, exc_value, exc_traceback = exc - except ValueError: - exc_type, exc_value = exc # pylint: disable=unbalanced-tuple-unpacking - exc_traceback = None - - # raise exc_type(exc_value).with_traceback(exc_traceback) - if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'): - # If exc_value is an exception, then just reraise - raise exc_value.with_traceback(exc_traceback) - else: - # else we'll create an exception using the value and raise that - raise exc_type(exc_value).with_traceback(exc_traceback) - - - # raise e.__class__, e.__class__(e), sys.exc_info()[2] - # exec("raise exc_type(exc_value).with_traceback(exc_traceback)") - - - -class AlwaysTask(Task): - def needs_execute(self): - """ - Always returns True (indicating this Task should always - be executed). - - Subclasses that need this behavior (as opposed to the default - of only executing Nodes that are out of date w.r.t. their - dependencies) can use this as follows: - - class MyTaskSubclass(SCons.Taskmaster.Task): - needs_execute = SCons.Taskmaster.AlwaysTask.needs_execute - """ - return True - -class OutOfDateTask(Task): - def needs_execute(self): - """ - Returns True (indicating this Task should be executed) if this - Task's target state indicates it needs executing, which has - already been determined by an earlier up-to-date check. - """ - return self.targets[0].get_state() == SCons.Node.executing - - -def find_cycle(stack, visited): - if stack[-1] in visited: - return None - visited.add(stack[-1]) - for n in stack[-1].waiting_parents: - stack.append(n) - if stack[0] == stack[-1]: - return stack - if find_cycle(stack, visited): - return stack - stack.pop() - return None - - -class Taskmaster: - """ - The Taskmaster for walking the dependency DAG. - """ - - def __init__(self, targets=[], tasker=None, order=None, trace=None): - self.original_top = targets - self.top_targets_left = targets[:] - self.top_targets_left.reverse() - self.candidates = [] - if tasker is None: - tasker = OutOfDateTask - self.tasker = tasker - if not order: - order = lambda l: l - self.order = order - self.message = None - self.trace = trace - self.next_candidate = self.find_next_candidate - self.pending_children = set() - - def find_next_candidate(self): - """ - Returns the next candidate Node for (potential) evaluation. - - The candidate list (really a stack) initially consists of all of - the top-level (command line) targets provided when the Taskmaster - was initialized. While we walk the DAG, visiting Nodes, all the - children that haven't finished processing get pushed on to the - candidate list. Each child can then be popped and examined in - turn for whether *their* children are all up-to-date, in which - case a Task will be created for their actual evaluation and - potential building. - - Here is where we also allow candidate Nodes to alter the list of - Nodes that should be examined. This is used, for example, when - invoking SCons in a source directory. A source directory Node can - return its corresponding build directory Node, essentially saying, - "Hey, you really need to build this thing over here instead." 
- """ - try: - return self.candidates.pop() - except IndexError: - pass - try: - node = self.top_targets_left.pop() - except IndexError: - return None - self.current_top = node - alt, message = node.alter_targets() - if alt: - self.message = message - self.candidates.append(node) - self.candidates.extend(self.order(alt)) - node = self.candidates.pop() - return node - - def no_next_candidate(self): - """ - Stops Taskmaster processing by not returning a next candidate. - - Note that we have to clean-up the Taskmaster candidate list - because the cycle detection depends on the fact all nodes have - been processed somehow. - """ - while self.candidates: - candidates = self.candidates - self.candidates = [] - self.will_not_build(candidates) - return None - - def _validate_pending_children(self): - """ - Validate the content of the pending_children set. Assert if an - internal error is found. - - This function is used strictly for debugging the taskmaster by - checking that no invariants are violated. It is not used in - normal operation. - - The pending_children set is used to detect cycles in the - dependency graph. We call a "pending child" a child that is - found in the "pending" state when checking the dependencies of - its parent node. - - A pending child can occur when the Taskmaster completes a loop - through a cycle. For example, let's imagine a graph made of - three nodes (A, B and C) making a cycle. The evaluation starts - at node A. The Taskmaster first considers whether node A's - child B is up-to-date. Then, recursively, node B needs to - check whether node C is up-to-date. This leaves us with a - dependency graph looking like:: - - Next candidate \ - \ - Node A (Pending) --> Node B(Pending) --> Node C (NoState) - ^ | - | | - +-------------------------------------+ - - Now, when the Taskmaster examines the Node C's child Node A, - it finds that Node A is in the "pending" state. Therefore, - Node A is a pending child of node C. - - Pending children indicate that the Taskmaster has potentially - loop back through a cycle. We say potentially because it could - also occur when a DAG is evaluated in parallel. For example, - consider the following graph:: - - Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... - | ^ - | | - +----------> Node D (NoState) --------+ - / - Next candidate / - - The Taskmaster first evaluates the nodes A, B, and C and - starts building some children of node C. Assuming, that the - maximum parallel level has not been reached, the Taskmaster - will examine Node D. It will find that Node C is a pending - child of Node D. - - In summary, evaluating a graph with a cycle will always - involve a pending child at one point. A pending child might - indicate either a cycle or a diamond-shaped DAG. Only a - fraction of the nodes ends-up being a "pending child" of - another node. This keeps the pending_children set small in - practice. - - We can differentiate between the two cases if we wait until - the end of the build. At this point, all the pending children - nodes due to a diamond-shaped DAG will have been properly - built (or will have failed to build). But, the pending - children involved in a cycle will still be in the pending - state. - - The taskmaster removes nodes from the pending_children set as - soon as a pending_children node moves out of the pending - state. This also helps to keep the pending_children set small. 
- """ - - for n in self.pending_children: - assert n.state in (NODE_PENDING, NODE_EXECUTING), \ - (str(n), StateString[n.state]) - assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) - for p in n.waiting_parents: - assert p.ref_count > 0, (str(n), str(p), p.ref_count) - - - def trace_message(self, message): - return 'Taskmaster: %s\n' % message - - def trace_node(self, node): - return '<%-10s %-3s %s>' % (StateString[node.get_state()], - node.ref_count, - repr(str(node))) - - def _find_next_ready_node(self): - """ - Finds the next node that is ready to be built. - - This is *the* main guts of the DAG walk. We loop through the - list of candidates, looking for something that has no un-built - children (i.e., that is a leaf Node or has dependencies that are - all leaf Nodes or up-to-date). Candidate Nodes are re-scanned - (both the target Node itself and its sources, which are always - scanned in the context of a given target) to discover implicit - dependencies. A Node that must wait for some children to be - built will be put back on the candidates list after the children - have finished building. A Node that has been put back on the - candidates list in this way may have itself (or its sources) - re-scanned, in order to handle generated header files (e.g.) and - the implicit dependencies therein. - - Note that this method does not do any signature calculation or - up-to-date check itself. All of that is handled by the Task - class. This is purely concerned with the dependency graph walk. - """ - - self.ready_exc = None - - T = self.trace - if T: T.write('\n' + self.trace_message('Looking for a node to evaluate')) - - while True: - node = self.next_candidate() - if node is None: - if T: T.write(self.trace_message('No candidate anymore.') + '\n') - return None - - node = node.disambiguate() - state = node.get_state() - - # For debugging only: - # - # try: - # self._validate_pending_children() - # except: - # self.ready_exc = sys.exc_info() - # return node - - if CollectStats: - if not hasattr(node.attributes, 'stats'): - node.attributes.stats = Stats() - StatsNodes.append(node) - S = node.attributes.stats - S.considered = S.considered + 1 - else: - S = None - - if T: T.write(self.trace_message(' Considering node %s and its children:' % self.trace_node(node))) - - if state == NODE_NO_STATE: - # Mark this node as being on the execution stack: - node.set_state(NODE_PENDING) - elif state > NODE_PENDING: - # Skip this node if it has already been evaluated: - if S: S.already_handled = S.already_handled + 1 - if T: T.write(self.trace_message(' already handled (executed)')) - continue - - executor = node.get_executor() - - try: - children = executor.get_all_children() - except SystemExit: - exc_value = sys.exc_info()[1] - e = SCons.Errors.ExplicitExit(node, exc_value.code) - self.ready_exc = (SCons.Errors.ExplicitExit, e) - if T: T.write(self.trace_message(' SystemExit')) - return node - except Exception as e: - # We had a problem just trying to figure out the - # children (like a child couldn't be linked in to a - # VariantDir, or a Scanner threw something). Arrange to - # raise the exception when the Task is "executed." 
- self.ready_exc = sys.exc_info() - if S: S.problem = S.problem + 1 - if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e)) - return node - - children_not_visited = [] - children_pending = set() - children_not_ready = [] - children_failed = False - - for child in chain(executor.get_all_prerequisites(), children): - childstate = child.get_state() - - if T: T.write(self.trace_message(' ' + self.trace_node(child))) - - if childstate == NODE_NO_STATE: - children_not_visited.append(child) - elif childstate == NODE_PENDING: - children_pending.add(child) - elif childstate == NODE_FAILED: - children_failed = True - - if childstate <= NODE_EXECUTING: - children_not_ready.append(child) - - # These nodes have not even been visited yet. Add - # them to the list so that on some next pass we can - # take a stab at evaluating them (or their children). - if children_not_visited: - if len(children_not_visited) > 1: - children_not_visited.reverse() - self.candidates.extend(self.order(children_not_visited)) - - # if T and children_not_visited: - # T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited))) - # T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates))) - - # Skip this node if any of its children have failed. - # - # This catches the case where we're descending a top-level - # target and one of our children failed while trying to be - # built by a *previous* descent of an earlier top-level - # target. - # - # It can also occur if a node is reused in multiple - # targets. One first descends though the one of the - # target, the next time occurs through the other target. - # - # Note that we can only have failed_children if the - # --keep-going flag was used, because without it the build - # will stop before diving in the other branch. - # - # Note that even if one of the children fails, we still - # added the other children to the list of candidate nodes - # to keep on building (--keep-going). - if children_failed: - for n in executor.get_action_targets(): - n.set_state(NODE_FAILED) - - if S: S.child_failed = S.child_failed + 1 - if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node))) - continue - - if children_not_ready: - for child in children_not_ready: - # We're waiting on one or more derived targets - # that have not yet finished building. - if S: S.not_built = S.not_built + 1 - - # Add this node to the waiting parents lists of - # anything we're waiting on, with a reference - # count so we can be put back on the list for - # re-evaluation when they've all finished. - node.ref_count = node.ref_count + child.add_to_waiting_parents(node) - if T: T.write(self.trace_message(' adjusted ref count: %s, child %s' % - (self.trace_node(node), repr(str(child))))) - - if T: - for pc in children_pending: - T.write(self.trace_message(' adding %s to the pending children set\n' % - self.trace_node(pc))) - self.pending_children = self.pending_children | children_pending - - continue - - # Skip this node if it has side-effects that are - # currently being built: - wait_side_effects = False - for se in executor.get_action_side_effects(): - if se.get_state() == NODE_EXECUTING: - se.add_to_waiting_s_e(node) - wait_side_effects = True - - if wait_side_effects: - if S: S.side_effects = S.side_effects + 1 - continue - - # The default when we've gotten through all of the checks above: - # this node is ready to be built. 
- if S: S.build = S.build + 1 - if T: T.write(self.trace_message('Evaluating %s\n' % - self.trace_node(node))) - - # For debugging only: - # - # try: - # self._validate_pending_children() - # except: - # self.ready_exc = sys.exc_info() - # return node - - return node - - return None - - def next_task(self): - """ - Returns the next task to be executed. - - This simply asks for the next Node to be evaluated, and then wraps - it in the specific Task subclass with which we were initialized. - """ - node = self._find_next_ready_node() - - if node is None: - return None - - executor = node.get_executor() - if executor is None: - return None - - tlist = executor.get_all_targets() - - task = self.tasker(self, tlist, node in self.original_top, node) - try: - task.make_ready() - except Exception as e : - # We had a problem just trying to get this task ready (like - # a child couldn't be linked to a VariantDir when deciding - # whether this node is current). Arrange to raise the - # exception when the Task is "executed." - self.ready_exc = sys.exc_info() - - if self.ready_exc: - task.exception_set(self.ready_exc) - - self.ready_exc = None - - return task - - def will_not_build(self, nodes, node_func=lambda n: None): - """ - Perform clean-up for nodes that will never be built. Invokes - a user defined function on all of these nodes (including all - of their parents). - """ - - T = self.trace - - pending_children = self.pending_children - - to_visit = set(nodes) - pending_children = pending_children - to_visit - - if T: - for n in nodes: - T.write(self.trace_message(' removing node %s from the pending children set\n' % - self.trace_node(n))) - try: - while len(to_visit): - node = to_visit.pop() - node_func(node) - - # Prune recursion by flushing the waiting children - # list immediately. - parents = node.waiting_parents - node.waiting_parents = set() - - to_visit = to_visit | parents - pending_children = pending_children - parents - - for p in parents: - p.ref_count = p.ref_count - 1 - if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' % - self.trace_node(p))) - except KeyError: - # The container to_visit has been emptied. - pass - - # We have to stick the pending_children list back into the - # taskmaster because the python 1.5.2 compatibility does not - # allow us to use in-place updates - self.pending_children = pending_children - - def stop(self): - """ - Stops the current build completely. - """ - self.next_candidate = self.no_next_candidate - - def cleanup(self): - """ - Check for dependency cycles. - """ - if not self.pending_children: - return - - nclist = [(n, find_cycle([n], set())) for n in self.pending_children] - - genuine_cycles = [ - node for node,cycle in nclist - if cycle or node.get_state() != NODE_EXECUTED - ] - if not genuine_cycles: - # All of the "cycles" found were single nodes in EXECUTED state, - # which is to say, they really weren't cycles. Just return.
- return - - desc = 'Found dependency cycle(s):\n' - for node, cycle in nclist: - if cycle: - desc = desc + " " + " -> ".join(map(str, cycle)) + "\n" - else: - desc = desc + \ - " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ - (node, repr(node), StateString[node.get_state()]) - - raise SCons.Errors.UserError(desc) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/TaskmasterTests.py scons-4.5.2+dfsg/SCons/TaskmasterTests.py --- scons-4.4.0+dfsg/SCons/TaskmasterTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/TaskmasterTests.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1257 +0,0 @@ -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -import SCons.compat - -import sys -import unittest - - -import SCons.Taskmaster -import SCons.Errors - - -built_text = None -cache_text = [] -visited_nodes = [] -executed = None -scan_called = 0 - -class Node: - def __init__(self, name, kids = [], scans = []): - self.name = name - self.kids = kids - self.scans = scans - self.cached = 0 - self.scanned = 0 - self.scanner = None - self.targets = [self] - self.prerequisites = None - class Builder: - def targets(self, node): - return node.targets - self.builder = Builder() - self.bsig = None - self.csig = None - self.state = SCons.Node.no_state - self.prepared = None - self.ref_count = 0 - self.waiting_parents = set() - self.waiting_s_e = set() - self.side_effect = 0 - self.side_effects = [] - self.alttargets = [] - self.postprocessed = None - self._bsig_val = None - self._current_val = 0 - self.always_build = None - - def disambiguate(self): - return self - - def push_to_cache(self): - pass - - def retrieve_from_cache(self): - global cache_text - if self.cached: - cache_text.append(self.name + " retrieved") - return self.cached - - def make_ready(self): - pass - - def prepare(self): - self.prepared = 1 - self.get_binfo() - - def build(self): - global built_text - built_text = self.name + " built" - - def remove(self): - pass - - # The following four methods new_binfo(), del_binfo(), - # get_binfo(), clear() as well as its calls have been added - # to support the cached_execute() test (issue #2720). - # They are full copies (or snippets) of their actual - # counterparts in the Node class... 
- def new_binfo(self): - binfo = "binfo" - return binfo - - def del_binfo(self): - """Delete the build info from this node.""" - try: - delattr(self, 'binfo') - except AttributeError: - pass - - def get_binfo(self): - """Fetch a node's build information.""" - try: - return self.binfo - except AttributeError: - pass - - binfo = self.new_binfo() - self.binfo = binfo - - return binfo - - def clear(self): - # The del_binfo() call here isn't necessary for normal execution, - # but is for interactive mode, where we might rebuild the same - # target and need to start from scratch. - self.del_binfo() - - def built(self): - global built_text - if not self.cached: - built_text = built_text + " really" - - # Clear the implicit dependency caches of any Nodes - # waiting for this Node to be built. - for parent in self.waiting_parents: - parent.implicit = None - - self.clear() - - def release_target_info(self): - pass - - def has_builder(self): - return self.builder is not None - - def is_derived(self): - return self.has_builder or self.side_effect - - def alter_targets(self): - return self.alttargets, None - - def visited(self): - global visited_nodes - visited_nodes.append(self.name) - - def children(self): - if not self.scanned: - self.scan() - self.scanned = 1 - return self.kids - - def scan(self): - global scan_called - scan_called = scan_called + 1 - self.kids = self.kids + self.scans - self.scans = [] - - def scanner_key(self): - return self.name - - def add_to_waiting_parents(self, node): - wp = self.waiting_parents - if node in wp: - return 0 - wp.add(node) - return 1 - - def get_state(self): - return self.state - - def set_state(self, state): - self.state = state - - def set_bsig(self, bsig): - self.bsig = bsig - - def set_csig(self, csig): - self.csig = csig - - def store_csig(self): - pass - - def store_bsig(self): - pass - - def is_pseudo_derived(self): - pass - - def is_up_to_date(self): - return self._current_val - - def depends_on(self, nodes): - for node in nodes: - if node in self.kids: - return 1 - return 0 - - def __str__(self): - return self.name - - def postprocess(self): - self.postprocessed = 1 - self.waiting_parents = set() - - def get_executor(self): - if not hasattr(self, 'executor'): - class Executor: - def prepare(self): - pass - def get_action_targets(self): - return self.targets - def get_all_targets(self): - return self.targets - def get_all_children(self): - result = [] - for node in self.targets: - result.extend(node.children()) - return result - def get_all_prerequisites(self): - return [] - def get_action_side_effects(self): - return [] - self.executor = Executor() - self.executor.targets = self.targets - return self.executor - -class OtherError(Exception): - pass - -class MyException(Exception): - pass - - -class TaskmasterTestCase(unittest.TestCase): - - def test_next_task(self): - """Test fetching the next task - """ - global built_text - - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster([n1, n1]) - t = tm.next_task() - t.prepare() - t.execute() - t = tm.next_task() - assert t is None - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - - tm = SCons.Taskmaster.Taskmaster([n3]) - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n1 built", built_text - t.executed() - t.postprocess() - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n2 built", built_text - t.executed() - t.postprocess() - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n3 built", built_text - t.executed() - 
t.postprocess() - - assert tm.next_task() is None - - built_text = "up to date: " - top_node = n3 - - class MyTask(SCons.Taskmaster.AlwaysTask): - def execute(self): - global built_text - if self.targets[0].get_state() == SCons.Node.up_to_date: - if self.top: - built_text = self.targets[0].name + " up-to-date top" - else: - built_text = self.targets[0].name + " up-to-date" - else: - self.targets[0].build() - - n1.set_state(SCons.Node.no_state) - n1._current_val = 1 - n2.set_state(SCons.Node.no_state) - n2._current_val = 1 - n3.set_state(SCons.Node.no_state) - n3._current_val = 1 - tm = SCons.Taskmaster.Taskmaster(targets = [n3], tasker = MyTask) - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n1 up-to-date", built_text - t.executed() - t.postprocess() - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n2 up-to-date", built_text - t.executed() - t.postprocess() - - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n3 up-to-date top", built_text - t.executed() - t.postprocess() - - assert tm.next_task() is None - - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - n4 = Node("n4") - n5 = Node("n5", [n3, n4]) - tm = SCons.Taskmaster.Taskmaster([n5]) - - t1 = tm.next_task() - assert t1.get_target() == n1 - - t2 = tm.next_task() - assert t2.get_target() == n2 - - t4 = tm.next_task() - assert t4.get_target() == n4 - t4.executed() - t4.postprocess() - - t1.executed() - t1.postprocess() - t2.executed() - t2.postprocess() - t3 = tm.next_task() - assert t3.get_target() == n3 - - t3.executed() - t3.postprocess() - t5 = tm.next_task() - assert t5.get_target() == n5, t5.get_target() - t5.executed() - t5.postprocess() - - assert tm.next_task() is None - - - n4 = Node("n4") - n4.set_state(SCons.Node.executed) - tm = SCons.Taskmaster.Taskmaster([n4]) - assert tm.next_task() is None - - n1 = Node("n1") - n2 = Node("n2", [n1]) - tm = SCons.Taskmaster.Taskmaster([n2,n2]) - t = tm.next_task() - t.executed() - t.postprocess() - t = tm.next_task() - assert tm.next_task() is None - - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1], [n2]) - tm = SCons.Taskmaster.Taskmaster([n3]) - t = tm.next_task() - target = t.get_target() - assert target == n1, target - t.executed() - t.postprocess() - t = tm.next_task() - target = t.get_target() - assert target == n2, target - t.executed() - t.postprocess() - t = tm.next_task() - target = t.get_target() - assert target == n3, target - t.executed() - t.postprocess() - assert tm.next_task() is None - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - n4 = Node("n4", [n3]) - n5 = Node("n5", [n3]) - global scan_called - scan_called = 0 - tm = SCons.Taskmaster.Taskmaster([n4]) - t = tm.next_task() - assert t.get_target() == n1 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n2 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n3 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n4 - t.executed() - t.postprocess() - assert tm.next_task() is None - assert scan_called == 4, scan_called - - tm = SCons.Taskmaster.Taskmaster([n5]) - t = tm.next_task() - assert t.get_target() == n5, t.get_target() - t.executed() - assert tm.next_task() is None - assert scan_called == 5, scan_called - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3") - n4 = Node("n4", [n1,n2,n3]) - n5 = Node("n5", [n4]) - n3.side_effect = 1 - n1.side_effects = n2.side_effects = n3.side_effects = [n4] - tm = 
SCons.Taskmaster.Taskmaster([n1,n2,n3,n4,n5]) - t = tm.next_task() - assert t.get_target() == n1 - assert n4.state == SCons.Node.executing, n4.state - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n2 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n3 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n4 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n5 - assert not tm.next_task() - t.executed() - t.postprocess() - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3") - n4 = Node("n4", [n1,n2,n3]) - def reverse(dependencies): - dependencies.reverse() - return dependencies - tm = SCons.Taskmaster.Taskmaster([n4], order=reverse) - t = tm.next_task() - assert t.get_target() == n3, t.get_target() - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n2, t.get_target() - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n1, t.get_target() - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n4, t.get_target() - t.executed() - t.postprocess() - - n5 = Node("n5") - n6 = Node("n6") - n7 = Node("n7") - n6.alttargets = [n7] - - tm = SCons.Taskmaster.Taskmaster([n5]) - t = tm.next_task() - assert t.get_target() == n5 - t.executed() - t.postprocess() - - tm = SCons.Taskmaster.Taskmaster([n6]) - t = tm.next_task() - assert t.get_target() == n7 - t.executed() - t.postprocess() - t = tm.next_task() - assert t.get_target() == n6 - t.executed() - t.postprocess() - - n1 = Node("n1") - n2 = Node("n2", [n1]) - n1.set_state(SCons.Node.failed) - tm = SCons.Taskmaster.Taskmaster([n2]) - assert tm.next_task() is None - - n1 = Node("n1") - n2 = Node("n2") - n1.targets = [n1, n2] - n1._current_val = 1 - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - t.executed() - t.postprocess() - - s = n1.get_state() - assert s == SCons.Node.executed, s - s = n2.get_state() - assert s == SCons.Node.executed, s - - - def test_make_ready_out_of_date(self): - """Test the Task.make_ready() method's list of out-of-date Nodes - """ - ood = [] - def TaskGen(tm, targets, top, node, ood=ood): - class MyTask(SCons.Taskmaster.AlwaysTask): - def make_ready(self): - SCons.Taskmaster.Task.make_ready(self) - self.ood.extend(self.out_of_date) - - t = MyTask(tm, targets, top, node) - t.ood = ood - return t - - n1 = Node("n1") - c2 = Node("c2") - c2._current_val = 1 - n3 = Node("n3") - c4 = Node("c4") - c4._current_val = 1 - a5 = Node("a5") - a5._current_val = 1 - a5.always_build = 1 - tm = SCons.Taskmaster.Taskmaster(targets = [n1, c2, n3, c4, a5], - tasker = TaskGen) - - del ood[:] - t = tm.next_task() - assert ood == [n1], ood - - del ood[:] - t = tm.next_task() - assert ood == [], ood - - del ood[:] - t = tm.next_task() - assert ood == [n3], ood - - del ood[:] - t = tm.next_task() - assert ood == [], ood - - del ood[:] - t = tm.next_task() - assert ood == [a5], ood - - def test_make_ready_exception(self): - """Test handling exceptions from Task.make_ready() - """ - class MyTask(SCons.Taskmaster.AlwaysTask): - def make_ready(self): - raise MyException("from make_ready()") - - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster(targets = [n1], tasker = MyTask) - t = tm.next_task() - exc_type, exc_value, exc_tb = t.exception - assert exc_type == MyException, repr(exc_type) - assert str(exc_value) == "from make_ready()", exc_value - - def test_needs_execute(self): - """Test that we can't instantiate a Task subclass without 
needs_execute - - We should be getting: - TypeError: Can't instantiate abstract class MyTask with abstract methods needs_execute - """ - class MyTask(SCons.Taskmaster.Task): - pass - - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster(targets=[n1], tasker=MyTask) - with self.assertRaises(TypeError): - _ = tm.next_task() - - def test_make_ready_all(self): - """Test the make_ready_all() method""" - class MyTask(SCons.Taskmaster.AlwaysTask): - make_ready = SCons.Taskmaster.Task.make_ready_all - - n1 = Node("n1") - c2 = Node("c2") - c2._current_val = 1 - n3 = Node("n3") - c4 = Node("c4") - c4._current_val = 1 - - tm = SCons.Taskmaster.Taskmaster(targets = [n1, c2, n3, c4]) - - t = tm.next_task() - target = t.get_target() - assert target is n1, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - target = t.get_target() - assert target is c2, target - assert target.state == SCons.Node.up_to_date, target.state - t = tm.next_task() - target = t.get_target() - assert target is n3, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - target = t.get_target() - assert target is c4, target - assert target.state == SCons.Node.up_to_date, target.state - t = tm.next_task() - assert t is None - - n1 = Node("n1") - c2 = Node("c2") - n3 = Node("n3") - c4 = Node("c4") - - tm = SCons.Taskmaster.Taskmaster(targets = [n1, c2, n3, c4], - tasker = MyTask) - - t = tm.next_task() - target = t.get_target() - assert target is n1, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - target = t.get_target() - assert target is c2, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - target = t.get_target() - assert target is n3, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - target = t.get_target() - assert target is c4, target - assert target.state == SCons.Node.executing, target.state - t = tm.next_task() - assert t is None - - - def test_children_errors(self): - """Test errors when fetching the children of a node. 
- """ - class StopNode(Node): - def children(self): - raise SCons.Errors.StopError("stop!") - class ExitNode(Node): - def children(self): - sys.exit(77) - - n1 = StopNode("n1") - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - exc_type, exc_value, exc_tb = t.exception - assert exc_type == SCons.Errors.StopError, repr(exc_type) - assert str(exc_value) == "stop!", exc_value - - n2 = ExitNode("n2") - tm = SCons.Taskmaster.Taskmaster([n2]) - t = tm.next_task() - exc_type, exc_value = t.exception - assert exc_type == SCons.Errors.ExplicitExit, repr(exc_type) - assert exc_value.node == n2, exc_value.node - assert exc_value.status == 77, exc_value.status - - def test_cycle_detection(self): - """Test detecting dependency cycles - """ - n1 = Node("n1") - n2 = Node("n2", [n1]) - n3 = Node("n3", [n2]) - n1.kids = [n3] - - tm = SCons.Taskmaster.Taskmaster([n3]) - try: - t = tm.next_task() - except SCons.Errors.UserError as e: - assert str(e) == "Dependency cycle: n3 -> n1 -> n2 -> n3", str(e) - else: - assert 'Did not catch expected UserError' - - def test_next_top_level_candidate(self): - """Test the next_top_level_candidate() method - """ - n1 = Node("n1") - n2 = Node("n2", [n1]) - n3 = Node("n3", [n2]) - - tm = SCons.Taskmaster.Taskmaster([n3]) - t = tm.next_task() - assert t.targets == [n1], t.targets - t.fail_stop() - assert t.targets == [n3], list(map(str, t.targets)) - assert t.top == 1, t.top - - def test_stop(self): - """Test the stop() method - - Both default and overridden in a subclass. - """ - global built_text - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - - tm = SCons.Taskmaster.Taskmaster([n3]) - t = tm.next_task() - t.prepare() - t.execute() - assert built_text == "n1 built", built_text - t.executed() - t.postprocess() - assert built_text == "n1 built really", built_text - - tm.stop() - assert tm.next_task() is None - - class MyTM(SCons.Taskmaster.Taskmaster): - def stop(self): - global built_text - built_text = "MyTM.stop()" - SCons.Taskmaster.Taskmaster.stop(self) - - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - - built_text = None - tm = MyTM([n3]) - tm.next_task().execute() - assert built_text == "n1 built" - - tm.stop() - assert built_text == "MyTM.stop()" - assert tm.next_task() is None - - def test_executed(self): - """Test when a task has been executed - """ - global built_text - global visited_nodes - - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - built_text = "xxx" - visited_nodes = [] - n1.set_state(SCons.Node.executing) - - t.executed() - - s = n1.get_state() - assert s == SCons.Node.executed, s - assert built_text == "xxx really", built_text - assert visited_nodes == ['n1'], visited_nodes - - n2 = Node("n2") - tm = SCons.Taskmaster.Taskmaster([n2]) - t = tm.next_task() - built_text = "should_not_change" - visited_nodes = [] - n2.set_state(None) - - t.executed() - - s = n2.get_state() - assert s is None, s - assert built_text == "should_not_change", built_text - assert visited_nodes == ['n2'], visited_nodes - - n3 = Node("n3") - n4 = Node("n4") - n3.targets = [n3, n4] - tm = SCons.Taskmaster.Taskmaster([n3]) - t = tm.next_task() - visited_nodes = [] - n3.set_state(SCons.Node.up_to_date) - n4.set_state(SCons.Node.executing) - - t.executed() - - s = n3.get_state() - assert s == SCons.Node.up_to_date, s - s = n4.get_state() - assert s == SCons.Node.executed, s - assert visited_nodes == ['n3', 'n4'], visited_nodes - - def test_prepare(self): - """Test preparation of multiple Nodes for a 
task - """ - n1 = Node("n1") - n2 = Node("n2") - tm = SCons.Taskmaster.Taskmaster([n1, n2]) - t = tm.next_task() - # This next line is moderately bogus. We're just reaching - # in and setting the targets for this task to an array. The - # "right" way to do this would be to have the next_task() call - # set it up by having something that approximates a real Builder - # return this list--but that's more work than is probably - # warranted right now. - n1.get_executor().targets = [n1, n2] - t.prepare() - assert n1.prepared - assert n2.prepared - - n3 = Node("n3") - n4 = Node("n4") - tm = SCons.Taskmaster.Taskmaster([n3, n4]) - t = tm.next_task() - # More bogus reaching in and setting the targets. - n3.set_state(SCons.Node.up_to_date) - n3.get_executor().targets = [n3, n4] - t.prepare() - assert n3.prepared - assert n4.prepared - - # If the Node has had an exception recorded while it was getting - # prepared, then prepare() should raise that exception. - class MyException(Exception): - pass - - built_text = None - n5 = Node("n5") - tm = SCons.Taskmaster.Taskmaster([n5]) - t = tm.next_task() - t.exception_set((MyException, "exception value")) - exc_caught = None - exc_actually_caught = None - exc_value = None - try: - t.prepare() - except MyException as e: - exc_caught = 1 - exc_value = e - except Exception as exc_actually_caught: - pass - assert exc_caught, "did not catch expected MyException: %s" % exc_actually_caught - assert str(exc_value) == "exception value", exc_value - assert built_text is None, built_text - - # Regression test, make sure we prepare not only - # all targets, but their side effects as well. - n6 = Node("n6") - n7 = Node("n7") - n8 = Node("n8") - n9 = Node("n9") - n10 = Node("n10") - - n6.side_effects = [ n8 ] - n7.side_effects = [ n9, n10 ] - - tm = SCons.Taskmaster.Taskmaster([n6, n7]) - t = tm.next_task() - # More bogus reaching in and setting the targets. - n6.get_executor().targets = [n6, n7] - t.prepare() - assert n6.prepared - assert n7.prepared - assert n8.prepared - assert n9.prepared - assert n10.prepared - - # Make sure we call an Executor's prepare() method. 
- class ExceptionExecutor: - def prepare(self): - raise Exception("Executor.prepare() exception") - def get_all_targets(self): - return self.nodes - def get_all_children(self): - result = [] - for node in self.nodes: - result.extend(node.children()) - return result - def get_all_prerequisites(self): - return [] - def get_action_side_effects(self): - return [] - - n11 = Node("n11") - n11.executor = ExceptionExecutor() - n11.executor.nodes = [n11] - tm = SCons.Taskmaster.Taskmaster([n11]) - t = tm.next_task() - try: - t.prepare() - except Exception as e: - assert str(e) == "Executor.prepare() exception", e - else: - raise AssertionError("did not catch expected exception") - - def test_execute(self): - """Test executing a task - """ - global built_text - global cache_text - - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - t.execute() - assert built_text == "n1 built", built_text - - def raise_UserError(): - raise SCons.Errors.UserError - n2 = Node("n2") - n2.build = raise_UserError - tm = SCons.Taskmaster.Taskmaster([n2]) - t = tm.next_task() - try: - t.execute() - except SCons.Errors.UserError: - pass - else: - self.fail("did not catch expected UserError") - - def raise_BuildError(): - raise SCons.Errors.BuildError - n3 = Node("n3") - n3.build = raise_BuildError - tm = SCons.Taskmaster.Taskmaster([n3]) - t = tm.next_task() - try: - t.execute() - except SCons.Errors.BuildError: - pass - else: - self.fail("did not catch expected BuildError") - - # On a generic (non-BuildError) exception from a Builder, - # the target should throw a BuildError exception with the - # args set to the exception value, instance, and traceback. - def raise_OtherError(): - raise OtherError - n4 = Node("n4") - n4.build = raise_OtherError - tm = SCons.Taskmaster.Taskmaster([n4]) - t = tm.next_task() - try: - t.execute() - except SCons.Errors.BuildError as e: - assert e.node == n4, e.node - assert e.errstr == "OtherError : ", e.errstr - assert len(e.exc_info) == 3, e.exc_info - exc_traceback = sys.exc_info()[2] - assert isinstance(e.exc_info[2], type(exc_traceback)), e.exc_info[2] - else: - self.fail("did not catch expected BuildError") - - built_text = None - cache_text = [] - n5 = Node("n5") - n6 = Node("n6") - n6.cached = 1 - tm = SCons.Taskmaster.Taskmaster([n5]) - t = tm.next_task() - # This next line is moderately bogus. We're just reaching - # in and setting the targets for this task to an array. The - # "right" way to do this would be to have the next_task() call - # set it up by having something that approximates a real Builder - # return this list--but that's more work than is probably - # warranted right now. - t.targets = [n5, n6] - t.execute() - assert built_text == "n5 built", built_text - assert cache_text == [], cache_text - - built_text = None - cache_text = [] - n7 = Node("n7") - n8 = Node("n8") - n7.cached = 1 - n8.cached = 1 - tm = SCons.Taskmaster.Taskmaster([n7]) - t = tm.next_task() - # This next line is moderately bogus. We're just reaching - # in and setting the targets for this task to an array. The - # "right" way to do this would be to have the next_task() call - # set it up by having something that approximates a real Builder - # return this list--but that's more work than is probably - # warranted right now. 
- t.targets = [n7, n8] - t.execute() - assert built_text is None, built_text - assert cache_text == ["n7 retrieved", "n8 retrieved"], cache_text - - def test_cached_execute(self): - """Test executing a task with cached targets - """ - # In issue #2720 Alexei Klimkin detected that the previous - # workflow for execute() led to problems in a multithreaded build. - # We have: - # task.prepare() - # task.execute() - # task.executed() - # -> node.visited() - # for the Serial flow, but - # - Parallel - - Worker - - # task.prepare() - # requestQueue.put(task) - # task = requestQueue.get() - # task.execute() - # resultQueue.put(task) - # task = resultQueue.get() - # task.executed() - # ->node.visited() - # in parallel. Since execute() used to call built() when a target - # was cached, it could unblock dependent nodes before the binfo got - # restored again in visited(). This resulted in spurious - # "file not found" build errors, because files fetched from cache would - # be seen as not up to date and wouldn't be scanned for implicit - # dependencies. - # - # The following test ensures that execute() only marks targets as cached, - # but the actual call to built() happens in executed() only. - # Like this, the binfo should still be intact after calling execute()... - global cache_text - - n1 = Node("n1") - # Mark the node as being cached - n1.cached = 1 - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - t.prepare() - t.execute() - assert cache_text == ["n1 retrieved"], cache_text - # If no binfo exists anymore, something has gone wrong... - has_binfo = hasattr(n1, 'binfo') - assert has_binfo, has_binfo - - def test_exception(self): - """Test generic Taskmaster exception handling - - """ - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster([n1]) - t = tm.next_task() - - t.exception_set((1, 2)) - exc_type, exc_value = t.exception - assert exc_type == 1, exc_type - assert exc_value == 2, exc_value - - t.exception_set(3) - assert t.exception == 3 - - try: 1//0 - except: - # Moved from below - t.exception_set(None) - #pass - -# import pdb; pdb.set_trace() - - # Having this here works for python 2.x, - # but it is a tuple (None, None, None) when called outside - # an except statement - # t.exception_set(None) - - exc_type, exc_value, exc_tb = t.exception - assert exc_type is ZeroDivisionError, "Expecting ZeroDevisionError got:%s"%exc_type - exception_values = [ - "integer division or modulo", - "integer division or modulo by zero", - "integer division by zero", # PyPy2 - ] - assert str(exc_value) in exception_values, exc_value - - class Exception1(Exception): - pass - - # Previously value was None, but while PY2 None = "", in Py3 None != "", so set to "" - t.exception_set((Exception1, "")) - try: - t.exception_raise() - except: - exc_type, exc_value = sys.exc_info()[:2] - assert exc_type == Exception1, exc_type - assert str(exc_value) == '', "Expecting empty string got:%s (type %s)"%(exc_value,type(exc_value)) - else: - assert 0, "did not catch expected exception" - - class Exception2(Exception): - pass - - t.exception_set((Exception2, "xyzzy")) - try: - t.exception_raise() - except: - exc_type, exc_value = sys.exc_info()[:2] - assert exc_type == Exception2, exc_type - assert str(exc_value) == "xyzzy", exc_value - else: - assert 0, "did not catch expected exception" - - class Exception3(Exception): - pass - - try: - 1//0 - except: - tb = sys.exc_info()[2] - t.exception_set((Exception3, "arg", tb)) - try: - t.exception_raise() - except: - exc_type, exc_value, exc_tb = sys.exc_info() - assert 
exc_type == Exception3, exc_type - assert str(exc_value) == "arg", exc_value - import traceback - x = traceback.extract_tb(tb)[-1] - y = traceback.extract_tb(exc_tb)[-1] - assert x == y, "x = %s, y = %s" % (x, y) - else: - assert 0, "did not catch expected exception" - - def test_postprocess(self): - """Test postprocessing targets to give them a chance to clean up - """ - n1 = Node("n1") - tm = SCons.Taskmaster.Taskmaster([n1]) - - t = tm.next_task() - assert not n1.postprocessed - t.postprocess() - assert n1.postprocessed - - n2 = Node("n2") - n3 = Node("n3") - tm = SCons.Taskmaster.Taskmaster([n2, n3]) - - assert not n2.postprocessed - assert not n3.postprocessed - t = tm.next_task() - t.postprocess() - assert n2.postprocessed - assert not n3.postprocessed - t = tm.next_task() - t.postprocess() - assert n2.postprocessed - assert n3.postprocessed - - def test_trace(self): - """Test Taskmaster tracing - """ - import io - - trace = io.StringIO() - n1 = Node("n1") - n2 = Node("n2") - n3 = Node("n3", [n1, n2]) - tm = SCons.Taskmaster.Taskmaster([n1, n1, n3], trace=trace) - t = tm.next_task() - t.prepare() - t.execute() - t.postprocess() - n1.set_state(SCons.Node.executed) - t = tm.next_task() - t.prepare() - t.execute() - t.postprocess() - n2.set_state(SCons.Node.executed) - t = tm.next_task() - t.prepare() - t.execute() - t.postprocess() - t = tm.next_task() - assert t is None - - value = trace.getvalue() - expect = """\ - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.postprocess(): node - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: already handled (executed) -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: adjusted ref count: , child 'n2' -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.postprocess(): node - -Taskmaster: Looking for a node to evaluate -Taskmaster: No candidate anymore. 
- -""" - assert value == expect, value - - - -if __name__ == "__main__": - unittest.main() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Tool/compilation_db.py scons-4.5.2+dfsg/SCons/Tool/compilation_db.py --- scons-4.4.0+dfsg/SCons/Tool/compilation_db.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/compilation_db.py 2023-03-21 16:17:04.000000000 +0000 @@ -34,6 +34,8 @@ import fnmatch import SCons +from SCons.Platform import TempFileMunge + from .cxx import CXXSuffixes from .cc import CSuffixes from .asm import ASSuffixes, ASPPSuffixes @@ -53,6 +55,7 @@ SCons.Node.Python.Value.__init__(self, value) self.Decider(changed_since_last_build_node) + def changed_since_last_build_node(child, target, prev_ni, node): """ Dummy decider to force always building""" return True @@ -103,6 +106,11 @@ return emit_compilation_db_entry +class CompDBTEMPFILE(TempFileMunge): + def __call__(self, target, source, env, for_signature): + return self.cmd + + def compilation_db_entry_action(target, source, env, **kw): """ Create a dictionary with evaluated command line, target, source @@ -119,6 +127,7 @@ target=env["__COMPILATIONDB_UOUTPUT"], source=env["__COMPILATIONDB_USOURCE"], env=env["__COMPILATIONDB_ENV"], + overrides={'TEMPFILE': CompDBTEMPFILE} ) entry = { @@ -163,6 +172,7 @@ json.dump( entries, output_file, sort_keys=True, indent=4, separators=(",", ": ") ) + output_file.write("\n") def scan_compilation_db(node, env, path): diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/extensions/docbook.py scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/extensions/docbook.py --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/extensions/docbook.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/extensions/docbook.py 2023-03-21 16:17:04.000000000 +0000 @@ -157,7 +157,7 @@ global pixelsPerInch global unitHash - m = re.search('([+-]?[\d.]+)(\S+)', length) + m = re.search(r'([+-]?[\d.]+)(\S+)', length) if m is not None and m.lastindex > 1: unit = pixelsPerInch if m.group(2) in unitHash: @@ -204,11 +204,11 @@ return default # If it's a list, get the first element - if type(varString) == type([]): + if isinstance(varString, list): varString = varString[0] # If it's not a string, it must be a node, get its content - if type(varString) != type(""): + if not isinstance(varString, str): varString = varString.content return varString diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/fo/highlight.xsl scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/fo/highlight.xsl --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/fo/highlight.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/fo/highlight.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,77 +1,77 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/c-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/c-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/c-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/c-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,101 
+1,101 @@ - - - - - /** - */ - - - - /// - - - - /* - */ - - // - - - # - \ - - - - " - \ - - - ' - \ - - - 0x - ul - lu - u - l - - - - . - - e - ul - lu - u - f - l - - - - auto - _Bool - break - case - char - _Complex - const - continue - default - do - double - else - enum - extern - float - for - goto - if - _Imaginary - inline - int - long - register - restrict - return - short - signed - sizeof - static - struct - switch - typedef - union - unsigned - void - volatile - while - + + + + + /** + */ + + + + /// + + + + /* + */ + + // + + + # + \ + + + + " + \ + + + ' + \ + + + 0x + ul + lu + u + l + + + + . + + e + ul + lu + u + f + l + + + + auto + _Bool + break + case + char + _Complex + const + continue + default + do + double + else + enum + extern + float + for + goto + if + _Imaginary + inline + int + long + register + restrict + return + short + signed + sizeof + static + struct + switch + typedef + union + unsigned + void + volatile + while + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/common.xsl scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/common.xsl --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/common.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/common.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,120 +1,120 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unprocessed xslthl style: - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + unprocessed xslthl style: + + + + + + + + + + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/cpp-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/cpp-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/cpp-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/cpp-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,150 +1,150 @@ - - - - - /** - */ - - - - /// - - - - /* - */ - - // - - - # - \ - - - - " - \ - - - ' - \ - - - 0x - ul - lu - u - l - - - - . - - e - ul - lu - u - f - l - - - - - auto - _Bool - break - case - char - _Complex - const - continue - default - do - double - else - enum - extern - float - for - goto - if - _Imaginary - inline - int - long - register - restrict - return - short - signed - sizeof - static - struct - switch - typedef - union - unsigned - void - volatile - while - - asm - dynamic_cast - namespace - reinterpret_cast - try - bool - explicit - new - static_cast - typeid - catch - false - operator - template - typename - class - friend - private - this - using - const_cast - inline - public - throw - virtual - delete - mutable - protected - true - wchar_t - + + + + + /** + */ + + + + /// + + + + /* + */ + + // + + + # + \ + + + + " + \ + + + ' + \ + + + 0x + ul + lu + u + l + + + + . 
+ + e + ul + lu + u + f + l + + + + + auto + _Bool + break + case + char + _Complex + const + continue + default + do + double + else + enum + extern + float + for + goto + if + _Imaginary + inline + int + long + register + restrict + return + short + signed + sizeof + static + struct + switch + typedef + union + unsigned + void + volatile + while + + asm + dynamic_cast + namespace + reinterpret_cast + try + bool + explicit + new + static_cast + typeid + catch + false + operator + template + typename + class + friend + private + this + using + const_cast + inline + public + throw + virtual + delete + mutable + protected + true + wchar_t + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/csharp-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/csharp-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/csharp-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/csharp-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,187 +1,187 @@ - - - - - /** - */ - - - - /// - - - - /* - */ - - // - - - [ - ] - ( - ) - - - - # - \ - - - - - @" - " - \ - - - - " - \ - - - ' - \ - - - 0x - ul - lu - u - l - - - - . - - e - ul - lu - u - f - d - m - l - - - - abstract - as - base - bool - break - byte - case - catch - char - checked - class - const - continue - decimal - default - delegate - do - double - else - enum - event - explicit - extern - false - finally - fixed - float - for - foreach - goto - if - implicit - in - int - interface - internal - is - lock - long - namespace - new - null - object - operator - out - override - params - private - protected - public - readonly - ref - return - sbyte - sealed - short - sizeof - stackalloc - static - string - struct - switch - this - throw - true - try - typeof - uint - ulong - unchecked - unsafe - ushort - using - virtual - void - volatile - while - - - - add - alias - get - global - partial - remove - set - value - where - yield - + + + + + /** + */ + + + + /// + + + + /* + */ + + // + + + [ + ] + ( + ) + + + + # + \ + + + + + @" + " + \ + + + + " + \ + + + ' + \ + + + 0x + ul + lu + u + l + + + + . 
+ + e + ul + lu + u + f + d + m + l + + + + abstract + as + base + bool + break + byte + case + catch + char + checked + class + const + continue + decimal + default + delegate + do + double + else + enum + event + explicit + extern + false + finally + fixed + float + for + foreach + goto + if + implicit + in + int + interface + internal + is + lock + long + namespace + new + null + object + operator + out + override + params + private + protected + public + readonly + ref + return + sbyte + sealed + short + sizeof + stackalloc + static + string + struct + switch + this + throw + true + try + typeof + uint + ulong + unchecked + unsafe + ushort + using + virtual + void + volatile + while + + + + add + alias + get + global + partial + remove + set + value + where + yield + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/delphi-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/delphi-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/delphi-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/delphi-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,200 +1,200 @@ - - - - - - {$ - } - - - - - (*$ - ) - - - - { - } - - - (* - *) - - // - - ' - - - - #$ - - - - - # - - - - - $ - - - - . - e - - - - - and - else - inherited - packed - then - array - end - initialization - procedure - threadvar - as - except - inline - program - to - asm - exports - interface - property - try - begin - file - is - raise - type - case - final - label - record - unit - class - finalization - library - repeat - unsafe - const - finally - mod - resourcestring - until - constructor - for - nil - sealed - uses - destructor - function - not - set - var - dispinterface - goto - object - shl - while - div - if - of - shr - with - do - implementation - or - static - xor - downto - in - out - string - - - at - on - - - absolute - dynamic - local - platform - requires - abstract - export - message - private - resident - assembler - external - name - protected - safecall - automated - far - near - public - stdcall - cdecl - forward - nodefault - published - stored - contains - implements - overload - read - varargs - default - index - override - readonly - virtual - deprecated - inline - package - register - write - dispid - library - pascal - reintroduce - writeonly - - + + + + + + {$ + } + + + + + (*$ + ) + + + + { + } + + + (* + *) + + // + + ' + + + + #$ + + + + + # + + + + + $ + + + + . 
+ e + + + + + and + else + inherited + packed + then + array + end + initialization + procedure + threadvar + as + except + inline + program + to + asm + exports + interface + property + try + begin + file + is + raise + type + case + final + label + record + unit + class + finalization + library + repeat + unsafe + const + finally + mod + resourcestring + until + constructor + for + nil + sealed + uses + destructor + function + not + set + var + dispinterface + goto + object + shl + while + div + if + of + shr + with + do + implementation + or + static + xor + downto + in + out + string + + + at + on + + + absolute + dynamic + local + platform + requires + abstract + export + message + private + resident + assembler + external + name + protected + safecall + automated + far + near + public + stdcall + cdecl + forward + nodefault + published + stored + contains + implements + overload + read + varargs + default + index + override + readonly + virtual + deprecated + inline + package + register + write + dispid + library + pascal + reintroduce + writeonly + + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ini-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ini-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ini-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ini-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,45 +1,45 @@ - - - - ; - - - ^(\[.+\]\s*)$ - - MULTILINE - - - - ^(.+)(?==) - - MULTILINE - + + + + ; + + + ^(\[.+\]\s*)$ + + MULTILINE + + + + ^(.+)(?==) + + MULTILINE + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/java-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/java-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/java-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/java-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,117 +1,117 @@ - - - - - /** - */ - - - - /* - */ - - // - - " - \ - - - ' - \ - - - @ - ( - ) - - - 0x - - - - . - e - f - d - l - - - - abstract - boolean - break - byte - case - catch - char - class - const - continue - default - do - double - else - extends - final - finally - float - for - goto - if - implements - import - instanceof - int - interface - long - native - new - package - private - protected - public - return - short - static - strictfp - super - switch - synchronized - this - throw - throws - transient - try - void - volatile - while - + + + + + /** + */ + + + + /* + */ + + // + + " + \ + + + ' + \ + + + @ + ( + ) + + + 0x + + + + . 
+ e + f + d + l + + + + abstract + boolean + break + byte + case + catch + char + class + const + continue + default + do + double + else + extends + final + finally + float + for + goto + if + implements + import + instanceof + int + interface + long + native + new + package + private + protected + public + return + short + static + strictfp + super + switch + synchronized + this + throw + throws + transient + try + void + volatile + while + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/javascript-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/javascript-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/javascript-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/javascript-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,147 +1,147 @@ - - - - - /* - */ - - // - - " - \ - - - ' - \ - - - 0x - - - - . - e - - - - break - case - catch - continue - default - delete - do - else - finally - for - function - if - in - instanceof - new - return - switch - this - throw - try - typeof - var - void - while - with - - abstract - boolean - byte - char - class - const - debugger - double - enum - export - extends - final - float - goto - implements - import - int - interface - long - native - package - private - protected - public - short - static - super - synchronized - throws - transient - volatile - - - prototype - - Array - Boolean - Date - Error - EvalError - Function - Math - Number - Object - RangeError - ReferenceError - RegExp - String - SyntaxError - TypeError - URIError - - decodeURI - decodeURIComponent - encodeURI - encodeURIComponent - eval - isFinite - isNaN - parseFloat - parseInt - - Infinity - NaN - undefined - + + + + + /* + */ + + // + + " + \ + + + ' + \ + + + 0x + + + + . + e + + + + break + case + catch + continue + default + delete + do + else + finally + for + function + if + in + instanceof + new + return + switch + this + throw + try + typeof + var + void + while + with + + abstract + boolean + byte + char + class + const + debugger + double + enum + export + extends + final + float + goto + implements + import + int + interface + long + native + package + private + protected + public + short + static + super + synchronized + throws + transient + volatile + + + prototype + + Array + Boolean + Date + Error + EvalError + Function + Math + Number + Object + RangeError + ReferenceError + RegExp + String + SyntaxError + TypeError + URIError + + decodeURI + decodeURIComponent + encodeURI + encodeURIComponent + eval + isFinite + isNaN + parseFloat + parseInt + + Infinity + NaN + undefined + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/m2-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/m2-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/m2-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/m2-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,90 +1,90 @@ - - - - - (* - *) - - - " - - - ' - - - . 
- e - - - - and - array - begin - by - case - const - definition - div - do - else - elsif - end - exit - export - for - from - if - implementation - import - in - loop - mod - module - not - of - or - pointer - procedure - qualified - record - repeat - return - set - then - to - type - until - var - while - with - - + + + + + (* + *) + + + " + + + ' + + + . + e + + + + and + array + begin + by + case + const + definition + div + do + else + elsif + end + exit + export + for + from + if + implementation + import + in + loop + mod + module + not + of + or + pointer + procedure + qualified + record + repeat + return + set + then + to + type + until + var + while + with + + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/myxml-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/myxml-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/myxml-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/myxml-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,116 +1,116 @@ - - - - - - - - A - ABBR - ACRONYM - ADDRESS - APPLET - AREA - B - BASE - BASEFONT - BDO - BIG - BLOCKQUOTE - BODY - BR - BUTTON - CAPTION - CENTER - CITE - CODE - COL - COLGROUP - DD - DEL - DFN - DIR - DIV - DL - DT - EM - FIELDSET - FONT - FORM - FRAME - FRAMESET - H1 - H2 - H3 - H4 - H5 - H6 - HEAD - HR - HTML - I - IFRAME - IMG - INPUT - INS - ISINDEX - KBD - LABEL - LEGEND - LI - LINK - MAP - MENU - META - NOFRAMES - NOSCRIPT - OBJECT - OL - OPTGROUP - OPTION - P - PARAM - PRE - Q - S - SAMP - SCRIPT - SELECT - SMALL - SPAN - STRIKE - STRONG - STYLE - SUB - SUP - TABLE - TBODY - TD - TEXTAREA - TFOOT - TH - THEAD - TITLE - TR - TT - U - UL - VAR - XMP - - - - - xsl: - - - + + + + + + + + A + ABBR + ACRONYM + ADDRESS + APPLET + AREA + B + BASE + BASEFONT + BDO + BIG + BLOCKQUOTE + BODY + BR + BUTTON + CAPTION + CENTER + CITE + CODE + COL + COLGROUP + DD + DEL + DFN + DIR + DIV + DL + DT + EM + FIELDSET + FONT + FORM + FRAME + FRAMESET + H1 + H2 + H3 + H4 + H5 + H6 + HEAD + HR + HTML + I + IFRAME + IMG + INPUT + INS + ISINDEX + KBD + LABEL + LEGEND + LI + LINK + MAP + MENU + META + NOFRAMES + NOSCRIPT + OBJECT + OL + OPTGROUP + OPTION + P + PARAM + PRE + Q + S + SAMP + SCRIPT + SELECT + SMALL + SPAN + STRIKE + STRONG + STYLE + SUB + SUP + TABLE + TBODY + TD + TEXTAREA + TFOOT + TH + THEAD + TITLE + TR + TT + U + UL + VAR + XMP + + + + + xsl: + + + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/perl-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/perl-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/perl-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/perl-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,120 +1,120 @@ - - - - # - - << - ' - " - - - - " - \ - - - ' - \ - - - - 0x - - - - . - - - - - if - unless - while - until - foreach - else - elsif - for - when - default - given - - caller - continue - die - do - dump - eval - exit - goto - last - next - redo - return - sub - wantarray - - caller - import - local - my - package - use - - do - import - no - package - require - use - - bless - dbmclose - dbmopen - package - ref - tie - tied - untie - use - - and - or - not - eq - ne - lt - gt - le - ge - cmp - + + + + # + + << + ' + " + + + + " + \ + + + ' + \ + + + + 0x + + + + . 
+ + + + + if + unless + while + until + foreach + else + elsif + for + when + default + given + + caller + continue + die + do + dump + eval + exit + goto + last + next + redo + return + sub + wantarray + + caller + import + local + my + package + use + + do + import + no + package + require + use + + bless + dbmclose + dbmopen + package + ref + tie + tied + untie + use + + and + or + not + eq + ne + lt + gt + le + ge + cmp + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/php-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/php-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/php-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/php-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,149 +1,149 @@ - - - - - /** - */ - - - - /// - - - - /* - */ - - // - # - - " - \ - - - - ' - \ - - - - <<< - - - 0x - - - - . - e - - - - and - or - xor - __FILE__ - exception - __LINE__ - array - as - break - case - class - const - continue - declare - default - die - do - echo - else - elseif - empty - enddeclare - endfor - endforeach - endif - endswitch - endwhile - eval - exit - extends - for - foreach - function - global - if - include - include_once - isset - list - new - print - require - require_once - return - static - switch - unset - use - var - while - __FUNCTION__ - __CLASS__ - __METHOD__ - final - php_user_filter - interface - implements - extends - public - private - protected - abstract - clone - try - catch - throw - cfunction - old_function - true - false - - - - - ?> - <?php - <?= - - + + + + + /** + */ + + + + /// + + + + /* + */ + + // + # + + " + \ + + + + ' + \ + + + + <<< + + + 0x + + + + . + e + + + + and + or + xor + __FILE__ + exception + __LINE__ + array + as + break + case + class + const + continue + declare + default + die + do + echo + else + elseif + empty + enddeclare + endfor + endforeach + endif + endswitch + endwhile + eval + exit + extends + for + foreach + function + global + if + include + include_once + isset + list + new + print + require + require_once + return + static + switch + unset + use + var + while + __FUNCTION__ + __CLASS__ + __METHOD__ + final + php_user_filter + interface + implements + extends + public + private + protected + abstract + clone + try + catch + throw + cfunction + old_function + true + false + + + + + ?> + <?php + <?= + + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/python-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/python-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/python-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/python-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,100 +1,100 @@ - - - - - - @ - ( - ) - - # - - """ - - - - ''' - - - - " - \ - - - ' - \ - - - 0x - l - - - - . - - e - l - - - - and - del - from - not - while - as - elif - global - or - with - assert - else - if - pass - yield - break - except - import - print - class - exec - in - raise - continue - finally - is - return - def - for - lambda - try - + + + + + + @ + ( + ) + + # + + """ + + + + ''' + + + + " + \ + + + ' + \ + + + 0x + l + + + + . 
+ + e + l + + + + and + del + from + not + while + as + elif + global + or + with + assert + else + if + pass + yield + break + except + import + print + class + exec + in + raise + continue + finally + is + return + def + for + lambda + try + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ruby-hl.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ruby-hl.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ruby-hl.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/ruby-hl.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,109 +1,109 @@ - - - - # - - << - - - - " - \ - - - %Q{ - } - \ - - - %/ - / - \ - - - ' - \ - - - %q{ - } - \ - - - 0x - - - - . - e - - - - alias - and - BEGIN - begin - break - case - class - def - defined - do - else - elsif - END - end - ensure - false - for - if - in - module - next - nil - not - or - redo - rescue - retry - return - self - super - then - true - undef - unless - until - when - while - yield - + + + + # + + << + + + + " + \ + + + %Q{ + } + \ + + + %/ + / + \ + + + ' + \ + + + %q{ + } + \ + + + 0x + + + + . + e + + + + alias + and + BEGIN + begin + break + case + class + def + defined + do + else + elsif + END + end + ensure + false + for + if + in + module + next + nil + not + or + redo + rescue + retry + return + self + super + then + true + undef + unless + until + when + while + yield + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/xslthl-config.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/xslthl-config.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/xslthl-config.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/highlighting/xslthl-config.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,46 +1,46 @@ - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/html/highlight.xsl scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/html/highlight.xsl --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/html/highlight.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/html/highlight.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,86 +1,86 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/bibliography.style.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/bibliography.style.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/bibliography.style.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/bibliography.style.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,35 +1,35 @@ - - -bibliography.style -list -normal -iso690 - - -bibliography.style -Style used for formatting of biblioentries. - - - - -normal - - - -Description - -Currently only normal and -iso690 styles are supported. 
- -In order to use ISO690 style to the full extent you might need -to use additional markup described on the -following WiKi page. - - - + + +bibliography.style +list +normal +iso690 + + +bibliography.style +Style used for formatting of biblioentries. + + + + +normal + + + +Description + +Currently only normal and +iso690 styles are supported. + +In order to use ISO690 style to the full extent you might need +to use additional markup described on the +following WiKi page. + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/highlight.source.xml scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/highlight.source.xml --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/highlight.source.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/params/highlight.source.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,82 +1,82 @@ - - -highlight.source -boolean - - -highlight.source -Should the content of programlisting -be syntactically highlighted? - - - - - - - - -Description - -When this parameter is non-zero, the stylesheets will try to do syntax highlighting of the -content of programlisting elements. You specify the language for each programlisting -by using the language attribute. The highlight.default.language -parameter can be used to specify the language for programlistings without a language -attribute. Syntax highlighting also works for screen and synopsis elements. - -The actual highlighting work is done by the XSLTHL extension module. This is an external Java library that has to be -downloaded separately (see below). - - -In order to use this extension, you must - -add xslthl-2.x.x.jar to your Java classpath. The latest version is available -from the XSLT syntax highlighting project -at SourceForge. - - -use a customization layer in which you import one of the following stylesheet modules: - - - html/highlight.xsl - - - - xhtml/highlight.xsl - - - - xhtml-1_1/highlight.xsl - - - - fo/highlight.xsl - - - - - -let either the xslthl.config Java system property or the -highlight.xslthl.config parameter point to the configuration file for syntax -highlighting (using URL syntax). DocBook XSL comes with a ready-to-use configuration file, -highlighting/xslthl-config.xml. - - - -The extension works with Saxon 6.5.x and Xalan-J. (Saxon 8.5 or later is also supported, but since it is -an XSLT 2.0 processor it is not guaranteed to work with DocBook XSL in all circumstances.) - -The following is an example of a Saxon 6 command adapted for syntax highlighting, to be used on Windows: - - -java -cp c:/Java/saxon.jar;c:/Java/xslthl-2.0.1.jar --Dxslthl.config=file:///c:/docbook-xsl/highlighting/xslthl-config.xml com.icl.saxon.StyleSheet --o test.html test.xml myhtml.xsl - - - - + + +highlight.source +boolean + + +highlight.source +Should the content of programlisting +be syntactically highlighted? + + + + + + + + +Description + +When this parameter is non-zero, the stylesheets will try to do syntax highlighting of the +content of programlisting elements. You specify the language for each programlisting +by using the language attribute. The highlight.default.language +parameter can be used to specify the language for programlistings without a language +attribute. Syntax highlighting also works for screen and synopsis elements. + +The actual highlighting work is done by the XSLTHL extension module. This is an external Java library that has to be +downloaded separately (see below). 
+ + +In order to use this extension, you must + +add xslthl-2.x.x.jar to your Java classpath. The latest version is available +from the XSLT syntax highlighting project +at SourceForge. + + +use a customization layer in which you import one of the following stylesheet modules: + + + html/highlight.xsl + + + + xhtml/highlight.xsl + + + + xhtml-1_1/highlight.xsl + + + + fo/highlight.xsl + + + + + +let either the xslthl.config Java system property or the +highlight.xslthl.config parameter point to the configuration file for syntax +highlighting (using URL syntax). DocBook XSL comes with a ready-to-use configuration file, +highlighting/xslthl-config.xml. + + + +The extension works with Saxon 6.5.x and Xalan-J. (Saxon 8.5 or later is also supported, but since it is +an XSLT 2.0 processor it is not guaranteed to work with DocBook XSL in all circumstances.) + +The following is an example of a Saxon 6 command adapted for syntax highlighting, to be used on Windows: + + +java -cp c:/Java/saxon.jar;c:/Java/xslthl-2.0.1.jar +-Dxslthl.config=file:///c:/docbook-xsl/highlighting/xslthl-config.xml com.icl.saxon.StyleSheet +-o test.html test.xml myhtml.xsl + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.async.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.async.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.async.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.async.js 2023-03-21 16:17:04.000000000 +0000 @@ -1,72 +1,72 @@ -/* - * Async Treeview 0.1 - Lazy-loading extension for Treeview - * - * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ - * - * Copyright (c) 2007 Jörn Zaefferer - * - * Dual licensed under the MIT and GPL licenses: - * http://www.opensource.org/licenses/mit-license.php - * http://www.gnu.org/licenses/gpl.html - * - * Revision: $Id$ - * - */ - -;(function($) { - -function load(settings, root, child, container) { - $.getJSON(settings.url, {root: root}, function(response) { - function createNode(parent) { - var current = $("
  • ").attr("id", this.id || "").html("" + this.text + "").appendTo(parent); - if (this.classes) { - current.children("span").addClass(this.classes); - } - if (this.expanded) { - current.addClass("open"); - } - if (this.hasChildren || this.children && this.children.length) { - var branch = $("
      ").appendTo(current); - if (this.hasChildren) { - current.addClass("hasChildren"); - createNode.call({ - text:"placeholder", - id:"placeholder", - children:[] - }, branch); - } - if (this.children && this.children.length) { - $.each(this.children, createNode, [branch]) - } - } - } - $.each(response, createNode, [child]); - $(container).treeview({add: child}); - }); -} - -var proxied = $.fn.treeview; -$.fn.treeview = function(settings) { - if (!settings.url) { - return proxied.apply(this, arguments); - } - var container = this; - load(settings, "source", this, container); - var userToggle = settings.toggle; - return proxied.call(this, $.extend({}, settings, { - collapsed: true, - toggle: function() { - var $this = $(this); - if ($this.hasClass("hasChildren")) { - var childList = $this.removeClass("hasChildren").find("ul"); - childList.empty(); - load(settings, this.id, childList, container); - } - if (userToggle) { - userToggle.apply(this, arguments); - } - } - })); -}; - +/* + * Async Treeview 0.1 - Lazy-loading extension for Treeview + * + * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ + * + * Copyright (c) 2007 Jörn Zaefferer + * + * Dual licensed under the MIT and GPL licenses: + * http://www.opensource.org/licenses/mit-license.php + * http://www.gnu.org/licenses/gpl.html + * + * Revision: $Id$ + * + */ + +;(function($) { + +function load(settings, root, child, container) { + $.getJSON(settings.url, {root: root}, function(response) { + function createNode(parent) { + var current = $("
    • ").attr("id", this.id || "").html("" + this.text + "").appendTo(parent); + if (this.classes) { + current.children("span").addClass(this.classes); + } + if (this.expanded) { + current.addClass("open"); + } + if (this.hasChildren || this.children && this.children.length) { + var branch = $("
        ").appendTo(current); + if (this.hasChildren) { + current.addClass("hasChildren"); + createNode.call({ + text:"placeholder", + id:"placeholder", + children:[] + }, branch); + } + if (this.children && this.children.length) { + $.each(this.children, createNode, [branch]) + } + } + } + $.each(response, createNode, [child]); + $(container).treeview({add: child}); + }); +} + +var proxied = $.fn.treeview; +$.fn.treeview = function(settings) { + if (!settings.url) { + return proxied.apply(this, arguments); + } + var container = this; + load(settings, "source", this, container); + var userToggle = settings.toggle; + return proxied.call(this, $.extend({}, settings, { + collapsed: true, + toggle: function() { + var $this = $(this); + if ($this.hasClass("hasChildren")) { + var childList = $this.removeClass("hasChildren").find("ul"); + childList.empty(); + load(settings, this.id, childList, container); + } + if (userToggle) { + userToggle.apply(this, arguments); + } + } + })); +}; + })(jQuery); \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.css scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.css --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.css 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.css 2023-03-21 16:17:04.000000000 +0000 @@ -1,85 +1,85 @@ -.treeview, .treeview ul { - padding: 0; - margin: 0; - list-style: none; -} - -.treeview ul { - background-color: white; - margin-top: 4px; -} - -.treeview .hitarea { - background: url(images/treeview-default.gif) -64px -25px no-repeat; - height: 16px; - width: 16px; - margin-left: -16px; - float: left; - cursor: pointer; -} -/* fix for IE6 */ -* html .hitarea { - display: inline; - float:none; -} - -.treeview li { - margin: 0; - padding: 3px 0 3px 16px; -} - -.treeview a.selected { - background-color: #eee; -} - -#treecontrol { margin: 1em 0; display: none; } - -.treeview .hover { color: red; cursor: pointer; } - -.treeview li { background: url(images/treeview-default-line.gif) 0 0 no-repeat; } -.treeview li.collapsable, .treeview li.expandable { background-position: 0 -176px; } - -.treeview .expandable-hitarea { background-position: -80px -3px; } - -.treeview li.last { background-position: 0 -1766px } -.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(images/treeview-default.gif); } -.treeview li.lastCollapsable { background-position: 0 -111px } -.treeview li.lastExpandable { background-position: -32px -67px } - -.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; } - -.treeview-red li { background-image: url(images/treeview-red-line.gif); } -.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(images/treeview-red.gif); } - -.treeview-black li { background-image: url(images/treeview-black-line.gif); } -.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(images/treeview-black.gif); } - -.treeview-gray li { background-image: url(images/treeview-gray-line.gif); } -.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: url(images/treeview-gray.gif); } - 
-.treeview-famfamfam li { background-image: url(images/treeview-famfamfam-line.gif); } -.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(images/treeview-famfamfam.gif); } - - -.filetree li { padding: 3px 0 2px 16px; } -.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; } -.filetree span.folder { background: url(images/folder.gif) 0 0 no-repeat; } -.filetree li.expandable span.folder { background: url(images/folder-closed.gif) 0 0 no-repeat; } -.filetree span.file { background: url(images/file.gif) 0 0 no-repeat; } - -html, body {height:100%; margin: 0; padding: 0; } - -/* -html>body { - font-size: 16px; - font-size: 68.75%; -} Reset Base Font Size */ - /* -body { - font-family: Verdana, helvetica, arial, sans-serif; - font-size: 68.75%; - background: #fff; - color: #333; -} */ - +.treeview, .treeview ul { + padding: 0; + margin: 0; + list-style: none; +} + +.treeview ul { + background-color: white; + margin-top: 4px; +} + +.treeview .hitarea { + background: url(images/treeview-default.gif) -64px -25px no-repeat; + height: 16px; + width: 16px; + margin-left: -16px; + float: left; + cursor: pointer; +} +/* fix for IE6 */ +* html .hitarea { + display: inline; + float:none; +} + +.treeview li { + margin: 0; + padding: 3px 0 3px 16px; +} + +.treeview a.selected { + background-color: #eee; +} + +#treecontrol { margin: 1em 0; display: none; } + +.treeview .hover { color: red; cursor: pointer; } + +.treeview li { background: url(images/treeview-default-line.gif) 0 0 no-repeat; } +.treeview li.collapsable, .treeview li.expandable { background-position: 0 -176px; } + +.treeview .expandable-hitarea { background-position: -80px -3px; } + +.treeview li.last { background-position: 0 -1766px } +.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(images/treeview-default.gif); } +.treeview li.lastCollapsable { background-position: 0 -111px } +.treeview li.lastExpandable { background-position: -32px -67px } + +.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; } + +.treeview-red li { background-image: url(images/treeview-red-line.gif); } +.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(images/treeview-red.gif); } + +.treeview-black li { background-image: url(images/treeview-black-line.gif); } +.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(images/treeview-black.gif); } + +.treeview-gray li { background-image: url(images/treeview-gray-line.gif); } +.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: url(images/treeview-gray.gif); } + +.treeview-famfamfam li { background-image: url(images/treeview-famfamfam-line.gif); } +.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(images/treeview-famfamfam.gif); } + + +.filetree li { padding: 3px 0 2px 16px; } +.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; } +.filetree span.folder { background: url(images/folder.gif) 0 0 no-repeat; } +.filetree li.expandable span.folder { background: url(images/folder-closed.gif) 0 0 no-repeat; } +.filetree span.file { background: url(images/file.gif) 0 0 no-repeat; } + +html, body {height:100%; margin: 0; padding: 0; } + +/* 
+html>body { + font-size: 16px; + font-size: 68.75%; +} Reset Base Font Size */ + /* +body { + font-family: Verdana, helvetica, arial, sans-serif; + font-size: 68.75%; + background: #fff; + color: #333; +} */ + a img { border: none; } \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.min.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.min.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.min.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/common/jquery/treeview/jquery.treeview.min.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -/* - * Treeview 1.4 - jQuery plugin to hide and show branches of a tree - * - * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ - * http://docs.jquery.com/Plugins/Treeview - * - * Copyright (c) 2007 Jörn Zaefferer - * - * Dual licensed under the MIT and GPL licenses: - * http://www.opensource.org/licenses/mit-license.php - * http://www.gnu.org/licenses/gpl.html - * - * Revision: $Id: jquery.treeview.js 4684 2008-02-07 19:08:06Z joern.zaefferer $ - * kasunbg: changed the cookieid name - * - */;(function($){$.extend($.fn,{swapClass:function(c1,c2){var c1Elements=this.filter('.'+c1);this.filter('.'+c2).removeClass(c2).addClass(c1);c1Elements.removeClass(c1).addClass(c2);return this;},replaceClass:function(c1,c2){return this.filter('.'+c1).removeClass(c1).addClass(c2).end();},hoverClass:function(className){className=className||"hover";return this.hover(function(){$(this).addClass(className);},function(){$(this).removeClass(className);});},heightToggle:function(animated,callback){animated?this.animate({height:"toggle"},animated,callback):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();if(callback)callback.apply(this,arguments);});},heightHide:function(animated,callback){if(animated){this.animate({height:"hide"},animated,callback);}else{this.hide();if(callback)this.each(callback);}},prepareBranches:function(settings){if(!settings.prerendered){this.filter(":last-child:not(ul)").addClass(CLASSES.last);this.filter((settings.collapsed?"":"."+CLASSES.closed)+":not(."+CLASSES.open+")").find(">ul").hide();}return this.filter(":has(>ul)");},applyClasses:function(settings,toggler){this.filter(":has(>ul):not(:has(>a))").find(">span").click(function(event){toggler.apply($(this).next());}).add($("a",this)).hoverClass();if(!settings.prerendered){this.filter(":has(>ul:hidden)").addClass(CLASSES.expandable).replaceClass(CLASSES.last,CLASSES.lastExpandable);this.not(":has(>ul:hidden)").addClass(CLASSES.collapsable).replaceClass(CLASSES.last,CLASSES.lastCollapsable);this.prepend("
        ").find("div."+CLASSES.hitarea).each(function(){var classes="";$.each($(this).parent().attr("class").split(" "),function(){classes+=this+"-hitarea ";});$(this).addClass(classes);});}this.find("div."+CLASSES.hitarea).click(toggler);},treeview:function(settings){if(typeof(window.treeCookieId) === 'undefined' || window.treeCookieId === ""){treeCookieId = "treeview";} settings=$.extend({cookieId: treeCookieId},settings);if(settings.add){return this.trigger("add",[settings.add]);}if(settings.toggle){var callback=settings.toggle;settings.toggle=function(){return callback.apply($(this).parent()[0],arguments);};}function treeController(tree,control){function handler(filter){return function(){toggler.apply($("div."+CLASSES.hitarea,tree).filter(function(){return filter?$(this).parent("."+filter).length:true;}));return false;};}$("a:eq(0)",control).click(handler(CLASSES.collapsable));$("a:eq(1)",control).click(handler(CLASSES.expandable));$("a:eq(2)",control).click(handler());}function toggler(){$(this).parent().find(">.hitarea").swapClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).swapClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().swapClass(CLASSES.collapsable,CLASSES.expandable).swapClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightToggle(settings.animated,settings.toggle);if(settings.unique){$(this).parent().siblings().find(">.hitarea").replaceClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).replaceClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().replaceClass(CLASSES.collapsable,CLASSES.expandable).replaceClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightHide(settings.animated,settings.toggle);}}function serialize(){function binary(arg){return arg?1:0;}var data=[];branches.each(function(i,e){data[i]=$(e).is(":has(>ul:visible)")?1:0;});$.cookie(settings.cookieId,data.join(""));}function deserialize(){var stored=$.cookie(settings.cookieId);if(stored){var data=stored.split("");branches.each(function(i,e){$(e).find(">ul")[parseInt(data[i])?"show":"hide"]();});}}this.addClass("treeview");var branches=this.find("li").prepareBranches(settings);switch(settings.persist){case"cookie":var toggleCallback=settings.toggle;settings.toggle=function(){serialize();if(toggleCallback){toggleCallback.apply(this,arguments);}};deserialize();break;case"location":var current=this.find("a").filter(function(){return this.href.toLowerCase()==location.href.toLowerCase();});if(current.length){current.addClass("selected").parents("ul, li").add(current.next()).show();}break;}branches.applyClasses(settings,toggler);if(settings.control){treeController(this,settings.control);$(settings.control).show();}return this.bind("add",function(event,branches){$(branches).prev().removeClass(CLASSES.last).removeClass(CLASSES.lastCollapsable).removeClass(CLASSES.lastExpandable).find(">.hitarea").removeClass(CLASSES.lastCollapsableHitarea).removeClass(CLASSES.lastExpandableHitarea);$(branches).find("li").andSelf().prepareBranches(settings).applyClasses(settings,toggler);});}});var CLASSES=$.fn.treeview.classes={open:"open",closed:"closed",expandable:"expandable",expandableHitarea:"expandable-hitarea",lastExpandableHitarea:"lastExpandable-hitarea",collapsable:"collapsable",collapsableHitarea:"collapsable-hitarea",lastCollapsableHitarea:"lastCollapsable-hitarea",lastCollapsable:"lastCollapsable",lastExpandable:"lastExpandable",last:"last",hitarea:"hitarea"};$.fn.Treeview=$.fn.treeview;})(jQuery); \ No newline at end of file 
diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/content/search/nwSearchFnt.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/content/search/nwSearchFnt.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/content/search/nwSearchFnt.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/docs/content/search/nwSearchFnt.js 2023-03-21 16:17:04.000000000 +0000 @@ -1,513 +1,513 @@ -/*---------------------------------------------------------------------------- - * JavaScript for webhelp search - *---------------------------------------------------------------------------- - This file is part of the webhelpsearch plugin for DocBook WebHelp - Copyright (c) 2007-2008 NexWave Solutions All Rights Reserved. - www.nexwave.biz Nadege Quaine - http://kasunbg.blogspot.com/ Kasun Gajasinghe - */ - -//string initialization -var htmlfileList = "htmlFileList.js"; -var htmlfileinfoList = "htmlFileInfoList.js"; -var useCJKTokenizing = false; - -/* Cette fonction verifie la validite de la recherche entrre par l utilisateur */ -function Verifie(ditaSearch_Form) { - - // Check browser compatibitily - if (navigator.userAgent.indexOf("Konquerer") > -1) { - - alert(txt_browser_not_supported); - return; - } - - - var expressionInput = document.ditaSearch_Form.textToSearch.value; - //Set a cookie to store the searched keywords - $.cookie('textToSearch', expressionInput); - - - if (expressionInput.length < 1) { - - // expression is invalid - alert(txt_enter_at_least_1_char); - // reactive la fenetre de search (utile car cadres) - document.ditaSearch_Form.textToSearch.focus(); - } - else { - - // Effectuer la recherche - Effectuer_recherche(expressionInput); - - // reactive la fenetre de search (utile car cadres) - document.ditaSearch_Form.textToSearch.focus(); - } -} - -var stemQueryMap = new Array(); // A hashtable which maps stems to query words - -/* This function parses the search expression, loads the indices and displays the results*/ -function Effectuer_recherche(expressionInput) { - - /* Display a waiting message */ - //DisplayWaitingMessage(); - - /*data initialisation*/ - var searchFor = ""; // expression en lowercase et sans les caracte res speciaux - //w = new Object(); // hashtable, key=word, value = list of the index of the html files - scriptLetterTab = new Scriptfirstchar(); // Array containing the first letter of each word to look for - var wordsList = new Array(); // Array with the words to look for - var finalWordsList = new Array(); // Array with the words to look for after removing spaces - var linkTab = new Array(); - var fileAndWordList = new Array(); - var txt_wordsnotfound = ""; - - - /*nqu: expressionInput, la recherche est lower cased, plus remplacement des char speciaux*/ - searchFor = expressionInput.toLowerCase().replace(/<\//g, "_st_").replace(/\$_/g, "_di_").replace(/\.|%2C|%3B|%21|%3A|@|\/|\*/g, " ").replace(/(%20)+/g, " ").replace(/_st_/g, "= 0; i--) { - if (fileAndWordList[i] != undefined) { - linkTab.push("

        " + txt_results_for + " " + "" + fileAndWordList[i][0].motslisteDisplay + "" + "

        "); - - linkTab.push("
          "); - for (t in fileAndWordList[i]) { - //DEBUG: alert(": "+ fileAndWordList[i][t].filenb+" " +fileAndWordList[i][t].motsliste); - //linkTab.push("
        • "+fl[fileAndWordList[i][t].filenb]+"
        • "); - var tempInfo = fil[fileAndWordList[i][t].filenb]; - var pos1 = tempInfo.indexOf("@@@"); - var pos2 = tempInfo.lastIndexOf("@@@"); - var tempPath = tempInfo.substring(0, pos1); - var tempTitle = tempInfo.substring(pos1 + 3, pos2); - var tempShortdesc = tempInfo.substring(pos2 + 3, tempInfo.length); - - //file:///home/kasun/docbook/WEBHELP/webhelp-draft-output-format-idea/src/main/resources/web/webhelp/installation.html - var linkString = "
        • " + tempTitle + ""; - // var linkString = "
        • " + tempTitle + ""; - if ((tempShortdesc != "null")) { - linkString += "\n
          " + tempShortdesc + "
          "; - } - linkString += "
        • "; - linkTab.push(linkString); - } - linkTab.push("
        "); - } - } - } - - var results = ""; - if (linkTab.length > 0) { - /*writeln ("

        " + txt_results_for + " " + "" + cleanwordsList + "" + "
        "+"

        ");*/ - results = "

        "; - //write("

          "); - for (t in linkTab) { - results += linkTab[t].toString(); - } - results += "

          "; - } else { - results = "

          " + "Your search returned no results for " + "" + txt_wordsnotfound + "" + "

          "; - } - //alert(results); - document.getElementById('searchResults').innerHTML = results; -} - -function tokenize(wordsList){ - var stemmedWordsList = new Array(); // Array with the words to look for after removing spaces - var cleanwordsList = new Array(); // Array with the words to look for - for(var j in wordsList){ - var word = wordsList[j]; - if(typeof stemmer != "undefined" ){ - stemQueryMap[stemmer(word)] = word; - } else { - stemQueryMap[word] = word; - } - } - //stemmedWordsList is the stemmed list of words separated by spaces. - for (var t in wordsList) { - wordsList[t] = wordsList[t].replace(/(%22)|^-/g, ""); - if (wordsList[t] != "%20") { - scriptLetterTab.add(wordsList[t].charAt(0)); - cleanwordsList.push(wordsList[t]); - } - } - - if(typeof stemmer != "undefined" ){ - //Do the stemming using Porter's stemming algorithm - for (var i = 0; i < cleanwordsList.length; i++) { - var stemWord = stemmer(cleanwordsList[i]); - stemmedWordsList.push(stemWord); - } - } else { - stemmedWordsList = cleanwordsList; - } - return stemmedWordsList; -} - -//Invoker of CJKTokenizer class methods. -function cjkTokenize(wordsList){ - var allTokens= new Array(); - var notCJKTokens= new Array(); - var j=0; - for(j=0;j"; - return this.input.substring(this.offset,this.offset+2); - } - - function getAllTokens(){ - while(this.incrementToken()){ - var tmp = this.tokenize(); - this.tokens.push(tmp); - } - return this.unique(this.tokens); -// document.getElementById("content").innerHTML += tokens+" "; -// document.getElementById("content").innerHTML += "
          dada"+sortedTokens+" "; -// console.log(tokens.length+"dsdsds"); - /*for(i=0;i t2.length) { - return 1; - } else { - return -1; - } - //return t1.length - t2.length); +/*---------------------------------------------------------------------------- + * JavaScript for webhelp search + *---------------------------------------------------------------------------- + This file is part of the webhelpsearch plugin for DocBook WebHelp + Copyright (c) 2007-2008 NexWave Solutions All Rights Reserved. + www.nexwave.biz Nadege Quaine + http://kasunbg.blogspot.com/ Kasun Gajasinghe + */ + +//string initialization +var htmlfileList = "htmlFileList.js"; +var htmlfileinfoList = "htmlFileInfoList.js"; +var useCJKTokenizing = false; + +/* Cette fonction verifie la validite de la recherche entrre par l utilisateur */ +function Verifie(ditaSearch_Form) { + + // Check browser compatibitily + if (navigator.userAgent.indexOf("Konquerer") > -1) { + + alert(txt_browser_not_supported); + return; + } + + + var expressionInput = document.ditaSearch_Form.textToSearch.value; + //Set a cookie to store the searched keywords + $.cookie('textToSearch', expressionInput); + + + if (expressionInput.length < 1) { + + // expression is invalid + alert(txt_enter_at_least_1_char); + // reactive la fenetre de search (utile car cadres) + document.ditaSearch_Form.textToSearch.focus(); + } + else { + + // Effectuer la recherche + Effectuer_recherche(expressionInput); + + // reactive la fenetre de search (utile car cadres) + document.ditaSearch_Form.textToSearch.focus(); + } +} + +var stemQueryMap = new Array(); // A hashtable which maps stems to query words + +/* This function parses the search expression, loads the indices and displays the results*/ +function Effectuer_recherche(expressionInput) { + + /* Display a waiting message */ + //DisplayWaitingMessage(); + + /*data initialisation*/ + var searchFor = ""; // expression en lowercase et sans les caracte res speciaux + //w = new Object(); // hashtable, key=word, value = list of the index of the html files + scriptLetterTab = new Scriptfirstchar(); // Array containing the first letter of each word to look for + var wordsList = new Array(); // Array with the words to look for + var finalWordsList = new Array(); // Array with the words to look for after removing spaces + var linkTab = new Array(); + var fileAndWordList = new Array(); + var txt_wordsnotfound = ""; + + + /*nqu: expressionInput, la recherche est lower cased, plus remplacement des char speciaux*/ + searchFor = expressionInput.toLowerCase().replace(/<\//g, "_st_").replace(/\$_/g, "_di_").replace(/\.|%2C|%3B|%21|%3A|@|\/|\*/g, " ").replace(/(%20)+/g, " ").replace(/_st_/g, "= 0; i--) { + if (fileAndWordList[i] != undefined) { + linkTab.push("

          " + txt_results_for + " " + "" + fileAndWordList[i][0].motslisteDisplay + "" + "

          "); + + linkTab.push("
            "); + for (t in fileAndWordList[i]) { + //DEBUG: alert(": "+ fileAndWordList[i][t].filenb+" " +fileAndWordList[i][t].motsliste); + //linkTab.push("
          • "+fl[fileAndWordList[i][t].filenb]+"
          • "); + var tempInfo = fil[fileAndWordList[i][t].filenb]; + var pos1 = tempInfo.indexOf("@@@"); + var pos2 = tempInfo.lastIndexOf("@@@"); + var tempPath = tempInfo.substring(0, pos1); + var tempTitle = tempInfo.substring(pos1 + 3, pos2); + var tempShortdesc = tempInfo.substring(pos2 + 3, tempInfo.length); + + //file:///home/kasun/docbook/WEBHELP/webhelp-draft-output-format-idea/src/main/resources/web/webhelp/installation.html + var linkString = "
          • " + tempTitle + ""; + // var linkString = "
          • " + tempTitle + ""; + if ((tempShortdesc != "null")) { + linkString += "\n
            " + tempShortdesc + "
            "; + } + linkString += "
          • "; + linkTab.push(linkString); + } + linkTab.push("
          "); + } + } + } + + var results = ""; + if (linkTab.length > 0) { + /*writeln ("

          " + txt_results_for + " " + "" + cleanwordsList + "" + "
          "+"

          ");*/ + results = "

          "; + //write("

            "); + for (t in linkTab) { + results += linkTab[t].toString(); + } + results += "

            "; + } else { + results = "

            " + "Your search returned no results for " + "" + txt_wordsnotfound + "" + "

            "; + } + //alert(results); + document.getElementById('searchResults').innerHTML = results; +} + +function tokenize(wordsList){ + var stemmedWordsList = new Array(); // Array with the words to look for after removing spaces + var cleanwordsList = new Array(); // Array with the words to look for + for(var j in wordsList){ + var word = wordsList[j]; + if(typeof stemmer != "undefined" ){ + stemQueryMap[stemmer(word)] = word; + } else { + stemQueryMap[word] = word; + } + } + //stemmedWordsList is the stemmed list of words separated by spaces. + for (var t in wordsList) { + wordsList[t] = wordsList[t].replace(/(%22)|^-/g, ""); + if (wordsList[t] != "%20") { + scriptLetterTab.add(wordsList[t].charAt(0)); + cleanwordsList.push(wordsList[t]); + } + } + + if(typeof stemmer != "undefined" ){ + //Do the stemming using Porter's stemming algorithm + for (var i = 0; i < cleanwordsList.length; i++) { + var stemWord = stemmer(cleanwordsList[i]); + stemmedWordsList.push(stemWord); + } + } else { + stemmedWordsList = cleanwordsList; + } + return stemmedWordsList; +} + +//Invoker of CJKTokenizer class methods. +function cjkTokenize(wordsList){ + var allTokens= new Array(); + var notCJKTokens= new Array(); + var j=0; + for(j=0;j"; + return this.input.substring(this.offset,this.offset+2); + } + + function getAllTokens(){ + while(this.incrementToken()){ + var tmp = this.tokenize(); + this.tokens.push(tmp); + } + return this.unique(this.tokens); +// document.getElementById("content").innerHTML += tokens+" "; +// document.getElementById("content").innerHTML += "
            dada"+sortedTokens+" "; +// console.log(tokens.length+"dsdsds"); + /*for(i=0;i t2.length) { + return 1; + } else { + return -1; + } + //return t1.length - t2.length); } \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.async.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.async.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.async.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.async.js 2023-03-21 16:17:04.000000000 +0000 @@ -1,72 +1,72 @@ -/* - * Async Treeview 0.1 - Lazy-loading extension for Treeview - * - * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ - * - * Copyright (c) 2007 Jörn Zaefferer - * - * Dual licensed under the MIT and GPL licenses: - * http://www.opensource.org/licenses/mit-license.php - * http://www.gnu.org/licenses/gpl.html - * - * Revision: $Id$ - * - */ - -;(function($) { - -function load(settings, root, child, container) { - $.getJSON(settings.url, {root: root}, function(response) { - function createNode(parent) { - var current = $("
          • ").attr("id", this.id || "").html("" + this.text + "").appendTo(parent); - if (this.classes) { - current.children("span").addClass(this.classes); - } - if (this.expanded) { - current.addClass("open"); - } - if (this.hasChildren || this.children && this.children.length) { - var branch = $("
              ").appendTo(current); - if (this.hasChildren) { - current.addClass("hasChildren"); - createNode.call({ - text:"placeholder", - id:"placeholder", - children:[] - }, branch); - } - if (this.children && this.children.length) { - $.each(this.children, createNode, [branch]) - } - } - } - $.each(response, createNode, [child]); - $(container).treeview({add: child}); - }); -} - -var proxied = $.fn.treeview; -$.fn.treeview = function(settings) { - if (!settings.url) { - return proxied.apply(this, arguments); - } - var container = this; - load(settings, "source", this, container); - var userToggle = settings.toggle; - return proxied.call(this, $.extend({}, settings, { - collapsed: true, - toggle: function() { - var $this = $(this); - if ($this.hasClass("hasChildren")) { - var childList = $this.removeClass("hasChildren").find("ul"); - childList.empty(); - load(settings, this.id, childList, container); - } - if (userToggle) { - userToggle.apply(this, arguments); - } - } - })); -}; - +/* + * Async Treeview 0.1 - Lazy-loading extension for Treeview + * + * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ + * + * Copyright (c) 2007 Jörn Zaefferer + * + * Dual licensed under the MIT and GPL licenses: + * http://www.opensource.org/licenses/mit-license.php + * http://www.gnu.org/licenses/gpl.html + * + * Revision: $Id$ + * + */ + +;(function($) { + +function load(settings, root, child, container) { + $.getJSON(settings.url, {root: root}, function(response) { + function createNode(parent) { + var current = $("
            • ").attr("id", this.id || "").html("" + this.text + "").appendTo(parent); + if (this.classes) { + current.children("span").addClass(this.classes); + } + if (this.expanded) { + current.addClass("open"); + } + if (this.hasChildren || this.children && this.children.length) { + var branch = $("
                ").appendTo(current); + if (this.hasChildren) { + current.addClass("hasChildren"); + createNode.call({ + text:"placeholder", + id:"placeholder", + children:[] + }, branch); + } + if (this.children && this.children.length) { + $.each(this.children, createNode, [branch]) + } + } + } + $.each(response, createNode, [child]); + $(container).treeview({add: child}); + }); +} + +var proxied = $.fn.treeview; +$.fn.treeview = function(settings) { + if (!settings.url) { + return proxied.apply(this, arguments); + } + var container = this; + load(settings, "source", this, container); + var userToggle = settings.toggle; + return proxied.call(this, $.extend({}, settings, { + collapsed: true, + toggle: function() { + var $this = $(this); + if ($this.hasClass("hasChildren")) { + var childList = $this.removeClass("hasChildren").find("ul"); + childList.empty(); + load(settings, this.id, childList, container); + } + if (userToggle) { + userToggle.apply(this, arguments); + } + } + })); +}; + })(jQuery); \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.css scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.css --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.css 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.css 2023-03-21 16:17:04.000000000 +0000 @@ -1,85 +1,85 @@ -.treeview, .treeview ul { - padding: 0; - margin: 0; - list-style: none; -} - -.treeview ul { - background-color: white; - margin-top: 4px; -} - -.treeview .hitarea { - background: url(images/treeview-default.gif) -64px -25px no-repeat; - height: 16px; - width: 16px; - margin-left: -16px; - float: left; - cursor: pointer; -} -/* fix for IE6 */ -* html .hitarea { - display: inline; - float:none; -} - -.treeview li { - margin: 0; - padding: 3px 0 3px 16px; -} - -.treeview a.selected { - background-color: #eee; -} - -#treecontrol { margin: 1em 0; display: none; } - -.treeview .hover { color: red; cursor: pointer; } - -.treeview li { background: url(images/treeview-default-line.gif) 0 0 no-repeat; } -.treeview li.collapsable, .treeview li.expandable { background-position: 0 -176px; } - -.treeview .expandable-hitarea { background-position: -80px -3px; } - -.treeview li.last { background-position: 0 -1766px } -.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(images/treeview-default.gif); } -.treeview li.lastCollapsable { background-position: 0 -111px } -.treeview li.lastExpandable { background-position: -32px -67px } - -.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; } - -.treeview-red li { background-image: url(images/treeview-red-line.gif); } -.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(images/treeview-red.gif); } - -.treeview-black li { background-image: url(images/treeview-black-line.gif); } -.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(images/treeview-black.gif); } - -.treeview-gray li { background-image: url(images/treeview-gray-line.gif); } -.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: 
url(images/treeview-gray.gif); } - -.treeview-famfamfam li { background-image: url(images/treeview-famfamfam-line.gif); } -.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(images/treeview-famfamfam.gif); } - - -.filetree li { padding: 3px 0 2px 16px; } -.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; } -.filetree span.folder { background: url(images/folder.gif) 0 0 no-repeat; } -.filetree li.expandable span.folder { background: url(images/folder-closed.gif) 0 0 no-repeat; } -.filetree span.file { background: url(images/file.gif) 0 0 no-repeat; } - -html, body {height:100%; margin: 0; padding: 0; } - -/* -html>body { - font-size: 16px; - font-size: 68.75%; -} Reset Base Font Size */ - /* -body { - font-family: Verdana, helvetica, arial, sans-serif; - font-size: 68.75%; - background: #fff; - color: #333; -} */ - +.treeview, .treeview ul { + padding: 0; + margin: 0; + list-style: none; +} + +.treeview ul { + background-color: white; + margin-top: 4px; +} + +.treeview .hitarea { + background: url(images/treeview-default.gif) -64px -25px no-repeat; + height: 16px; + width: 16px; + margin-left: -16px; + float: left; + cursor: pointer; +} +/* fix for IE6 */ +* html .hitarea { + display: inline; + float:none; +} + +.treeview li { + margin: 0; + padding: 3px 0 3px 16px; +} + +.treeview a.selected { + background-color: #eee; +} + +#treecontrol { margin: 1em 0; display: none; } + +.treeview .hover { color: red; cursor: pointer; } + +.treeview li { background: url(images/treeview-default-line.gif) 0 0 no-repeat; } +.treeview li.collapsable, .treeview li.expandable { background-position: 0 -176px; } + +.treeview .expandable-hitarea { background-position: -80px -3px; } + +.treeview li.last { background-position: 0 -1766px } +.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(images/treeview-default.gif); } +.treeview li.lastCollapsable { background-position: 0 -111px } +.treeview li.lastExpandable { background-position: -32px -67px } + +.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; } + +.treeview-red li { background-image: url(images/treeview-red-line.gif); } +.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(images/treeview-red.gif); } + +.treeview-black li { background-image: url(images/treeview-black-line.gif); } +.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(images/treeview-black.gif); } + +.treeview-gray li { background-image: url(images/treeview-gray-line.gif); } +.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: url(images/treeview-gray.gif); } + +.treeview-famfamfam li { background-image: url(images/treeview-famfamfam-line.gif); } +.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(images/treeview-famfamfam.gif); } + + +.filetree li { padding: 3px 0 2px 16px; } +.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; } +.filetree span.folder { background: url(images/folder.gif) 0 0 no-repeat; } +.filetree li.expandable span.folder { background: url(images/folder-closed.gif) 0 0 no-repeat; } +.filetree span.file { background: url(images/file.gif) 0 0 no-repeat; } + +html, body 
{height:100%; margin: 0; padding: 0; } + +/* +html>body { + font-size: 16px; + font-size: 68.75%; +} Reset Base Font Size */ + /* +body { + font-family: Verdana, helvetica, arial, sans-serif; + font-size: 68.75%; + background: #fff; + color: #333; +} */ + a img { border: none; } \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.min.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.min.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.min.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/common/jquery/treeview/jquery.treeview.min.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -/* - * Treeview 1.4 - jQuery plugin to hide and show branches of a tree - * - * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/ - * http://docs.jquery.com/Plugins/Treeview - * - * Copyright (c) 2007 Jörn Zaefferer - * - * Dual licensed under the MIT and GPL licenses: - * http://www.opensource.org/licenses/mit-license.php - * http://www.gnu.org/licenses/gpl.html - * - * Revision: $Id: jquery.treeview.js 4684 2008-02-07 19:08:06Z joern.zaefferer $ - * kasunbg: changed the cookieid name - * - */;(function($){$.extend($.fn,{swapClass:function(c1,c2){var c1Elements=this.filter('.'+c1);this.filter('.'+c2).removeClass(c2).addClass(c1);c1Elements.removeClass(c1).addClass(c2);return this;},replaceClass:function(c1,c2){return this.filter('.'+c1).removeClass(c1).addClass(c2).end();},hoverClass:function(className){className=className||"hover";return this.hover(function(){$(this).addClass(className);},function(){$(this).removeClass(className);});},heightToggle:function(animated,callback){animated?this.animate({height:"toggle"},animated,callback):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();if(callback)callback.apply(this,arguments);});},heightHide:function(animated,callback){if(animated){this.animate({height:"hide"},animated,callback);}else{this.hide();if(callback)this.each(callback);}},prepareBranches:function(settings){if(!settings.prerendered){this.filter(":last-child:not(ul)").addClass(CLASSES.last);this.filter((settings.collapsed?"":"."+CLASSES.closed)+":not(."+CLASSES.open+")").find(">ul").hide();}return this.filter(":has(>ul)");},applyClasses:function(settings,toggler){this.filter(":has(>ul):not(:has(>a))").find(">span").click(function(event){toggler.apply($(this).next());}).add($("a",this)).hoverClass();if(!settings.prerendered){this.filter(":has(>ul:hidden)").addClass(CLASSES.expandable).replaceClass(CLASSES.last,CLASSES.lastExpandable);this.not(":has(>ul:hidden)").addClass(CLASSES.collapsable).replaceClass(CLASSES.last,CLASSES.lastCollapsable);this.prepend("
                ").find("div."+CLASSES.hitarea).each(function(){var classes="";$.each($(this).parent().attr("class").split(" "),function(){classes+=this+"-hitarea ";});$(this).addClass(classes);});}this.find("div."+CLASSES.hitarea).click(toggler);},treeview:function(settings){if(typeof(window.treeCookieId) === 'undefined' || window.treeCookieId === ""){treeCookieId = "treeview";} settings=$.extend({cookieId: treeCookieId},settings);if(settings.add){return this.trigger("add",[settings.add]);}if(settings.toggle){var callback=settings.toggle;settings.toggle=function(){return callback.apply($(this).parent()[0],arguments);};}function treeController(tree,control){function handler(filter){return function(){toggler.apply($("div."+CLASSES.hitarea,tree).filter(function(){return filter?$(this).parent("."+filter).length:true;}));return false;};}$("a:eq(0)",control).click(handler(CLASSES.collapsable));$("a:eq(1)",control).click(handler(CLASSES.expandable));$("a:eq(2)",control).click(handler());}function toggler(){$(this).parent().find(">.hitarea").swapClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).swapClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().swapClass(CLASSES.collapsable,CLASSES.expandable).swapClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightToggle(settings.animated,settings.toggle);if(settings.unique){$(this).parent().siblings().find(">.hitarea").replaceClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).replaceClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().replaceClass(CLASSES.collapsable,CLASSES.expandable).replaceClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightHide(settings.animated,settings.toggle);}}function serialize(){function binary(arg){return arg?1:0;}var data=[];branches.each(function(i,e){data[i]=$(e).is(":has(>ul:visible)")?1:0;});$.cookie(settings.cookieId,data.join(""));}function deserialize(){var stored=$.cookie(settings.cookieId);if(stored){var data=stored.split("");branches.each(function(i,e){$(e).find(">ul")[parseInt(data[i])?"show":"hide"]();});}}this.addClass("treeview");var branches=this.find("li").prepareBranches(settings);switch(settings.persist){case"cookie":var toggleCallback=settings.toggle;settings.toggle=function(){serialize();if(toggleCallback){toggleCallback.apply(this,arguments);}};deserialize();break;case"location":var current=this.find("a").filter(function(){return this.href.toLowerCase()==location.href.toLowerCase();});if(current.length){current.addClass("selected").parents("ul, li").add(current.next()).show();}break;}branches.applyClasses(settings,toggler);if(settings.control){treeController(this,settings.control);$(settings.control).show();}return this.bind("add",function(event,branches){$(branches).prev().removeClass(CLASSES.last).removeClass(CLASSES.lastCollapsable).removeClass(CLASSES.lastExpandable).find(">.hitarea").removeClass(CLASSES.lastCollapsableHitarea).removeClass(CLASSES.lastExpandableHitarea);$(branches).find("li").andSelf().prepareBranches(settings).applyClasses(settings,toggler);});}});var CLASSES=$.fn.treeview.classes={open:"open",closed:"closed",expandable:"expandable",expandableHitarea:"expandable-hitarea",lastExpandableHitarea:"lastExpandable-hitarea",collapsable:"collapsable",collapsableHitarea:"collapsable-hitarea",lastCollapsableHitarea:"lastCollapsable-hitarea",lastCollapsable:"lastCollapsable",lastExpandable:"lastExpandable",last:"last",hitarea:"hitarea"};$.fn.Treeview=$.fn.treeview;})(jQuery); \ No newline at end 
of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/en-us.props scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/en-us.props --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/en-us.props 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/en-us.props 2023-03-21 16:17:04.000000000 +0000 @@ -1,45 +1,45 @@ -DEF01=this -DEF02=is -DEF03=the -DEF04=in -DEF05=i -DEF06=on -DEF07=a -DEF08=about -DEF09=an -DEF10=are -DEF11=as -DEF12=at -DEF13=be -DEF14=by -DEF15=com -DEF16=de -DEF17=en -DEF18=for -DEF19=from -DEF20=how -DEF21=it -DEF22=la -DEF23=of -DEF24=on -DEF25=or -DEF26=that -DEF27=to -DEF28=was -DEF29=what -DEF30=when -DEF31=where -DEF32=who -DEF33=will -DEF34=with -DEF35=und -DEF36=Next -DEF37=Prev -DEF38=Home -DEF39=Motive -DEF40=Inc -DEF41=Copyright -DEF42=All -DEF43=rights -DEF44=reserved +DEF01=this +DEF02=is +DEF03=the +DEF04=in +DEF05=i +DEF06=on +DEF07=a +DEF08=about +DEF09=an +DEF10=are +DEF11=as +DEF12=at +DEF13=be +DEF14=by +DEF15=com +DEF16=de +DEF17=en +DEF18=for +DEF19=from +DEF20=how +DEF21=it +DEF22=la +DEF23=of +DEF24=on +DEF25=or +DEF26=that +DEF27=to +DEF28=was +DEF29=what +DEF30=when +DEF31=where +DEF32=who +DEF33=will +DEF34=with +DEF35=und +DEF36=Next +DEF37=Prev +DEF38=Home +DEF39=Motive +DEF40=Inc +DEF41=Copyright +DEF42=All +DEF43=rights +DEF44=reserved DEF45=Up \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/es-es.props scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/es-es.props --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/es-es.props 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/es-es.props 2023-03-21 16:17:04.000000000 +0000 @@ -1,179 +1,179 @@ -DEF01=un -DEF02=una -DEF03=unas -DEF04=unos -DEF05=uno -DEF06=sobre -DEF07=todo -DEF08=tambin -DEF09=tras -DEF10=otro -DEF11=algn -DEF12=alguno -DEF13=alguna -DEF14=algunos -DEF15=algunas -DEF16=ser -DEF17=es -DEF18=soy -DEF19=eres -DEF20=somos -DEF21=sois -DEF22=estoy -DEF23=esta -DEF24=estamos -DEF25=estais -DEF26=estan -DEF27=como -DEF28=en -DEF29=para -DEF30=atras -DEF31=porque -DEF32=por -DEF33=estado -DEF34=estaba -DEF35=ante -DEF36=antes -DEF37=siendo -DEF38=ambos -DEF39=pero -DEF40=por -DEF41=poder -DEF42=puede -DEF43=puedo -DEF44=podemos -DEF45=podeis -DEF46=pueden -DEF47=fui -DEF48=fue -DEF49=fuimos -DEF50=fueron -DEF51=hacer -DEF52=hago -DEF53=hace -DEF54=hacemos -DEF55=haceis -DEF56=hacen -DEF57=cada -DEF58=fin -DEF59=incluso -DEF60=primero -DEF61=desde -DEF62=conseguir -DEF63=consigo -DEF64=consigue -DEF65=consigues -DEF66=conseguimos -DEF67=consiguen -DEF68=ir -DEF69=voy -DEF70=va -DEF71=vamos -DEF72=vais -DEF73=van -DEF74=vaya -DEF75=gueno -DEF76=ha -DEF77=tener -DEF78=tengo -DEF79=tiene -DEF80=tenemos -DEF81=teneis -DEF82=tienen -DEF83=el -DEF84=la -DEF85=lo -DEF86=las -DEF87=los -DEF88=su -DEF89=aqui -DEF90=mio -DEF91=tuyo -DEF92=ellos -DEF93=ellas -DEF94=nos -DEF95=nosotros -DEF96=vosotros -DEF97=vosotras -DEF98=si -DEF99=dentro -DEF100=solo -DEF101=solamente -DEF102=saber -DEF103=sabes -DEF104=sabe -DEF105=sabemos -DEF106=sabeis -DEF107=saben -DEF108=ultimo -DEF109=largo -DEF110=bastante -DEF111=haces -DEF112=muchos -DEF113=aquellos -DEF114=aquellas -DEF115=sus 
-DEF116=entonces -DEF117=tiempo -DEF118=verdad -DEF119=verdadero -DEF120=verdadera -DEF121=cierto -DEF122=ciertos -DEF123=cierta -DEF124=ciertas -DEF125=intentar -DEF126=intento -DEF127=intenta -DEF128=intentas -DEF129=intentamos -DEF130=intentais -DEF131=intentan -DEF132=dos -DEF133=bajo -DEF134=arriba -DEF135=encima -DEF136=usar -DEF137=uso -DEF138=usas -DEF139=usa -DEF140=usamos -DEF141=usais -DEF142=usan -DEF143=emplear -DEF144=empleo -DEF145=empleas -DEF146=emplean -DEF147=ampleamos -DEF148=empleais -DEF149=valor -DEF150=muy -DEF151=era -DEF152=eras -DEF153=eramos -DEF154=eran -DEF155=modo -DEF156=bien -DEF157=cual -DEF158=cuando -DEF159=donde -DEF160=mientras -DEF161=quien -DEF162=con -DEF163=entre -DEF164=sin -DEF165=trabajo -DEF166=trabajar -DEF167=trabajas -DEF168=trabaja -DEF169=trabajamos -DEF170=trabajais -DEF171=trabajan -DEF172=podria -DEF173=podrias -DEF174=podriamos -DEF175=podrian -DEF176=podriais -DEF177=yo -DEF178=aquel +DEF01=un +DEF02=una +DEF03=unas +DEF04=unos +DEF05=uno +DEF06=sobre +DEF07=todo +DEF08=tambin +DEF09=tras +DEF10=otro +DEF11=algn +DEF12=alguno +DEF13=alguna +DEF14=algunos +DEF15=algunas +DEF16=ser +DEF17=es +DEF18=soy +DEF19=eres +DEF20=somos +DEF21=sois +DEF22=estoy +DEF23=esta +DEF24=estamos +DEF25=estais +DEF26=estan +DEF27=como +DEF28=en +DEF29=para +DEF30=atras +DEF31=porque +DEF32=por +DEF33=estado +DEF34=estaba +DEF35=ante +DEF36=antes +DEF37=siendo +DEF38=ambos +DEF39=pero +DEF40=por +DEF41=poder +DEF42=puede +DEF43=puedo +DEF44=podemos +DEF45=podeis +DEF46=pueden +DEF47=fui +DEF48=fue +DEF49=fuimos +DEF50=fueron +DEF51=hacer +DEF52=hago +DEF53=hace +DEF54=hacemos +DEF55=haceis +DEF56=hacen +DEF57=cada +DEF58=fin +DEF59=incluso +DEF60=primero +DEF61=desde +DEF62=conseguir +DEF63=consigo +DEF64=consigue +DEF65=consigues +DEF66=conseguimos +DEF67=consiguen +DEF68=ir +DEF69=voy +DEF70=va +DEF71=vamos +DEF72=vais +DEF73=van +DEF74=vaya +DEF75=gueno +DEF76=ha +DEF77=tener +DEF78=tengo +DEF79=tiene +DEF80=tenemos +DEF81=teneis +DEF82=tienen +DEF83=el +DEF84=la +DEF85=lo +DEF86=las +DEF87=los +DEF88=su +DEF89=aqui +DEF90=mio +DEF91=tuyo +DEF92=ellos +DEF93=ellas +DEF94=nos +DEF95=nosotros +DEF96=vosotros +DEF97=vosotras +DEF98=si +DEF99=dentro +DEF100=solo +DEF101=solamente +DEF102=saber +DEF103=sabes +DEF104=sabe +DEF105=sabemos +DEF106=sabeis +DEF107=saben +DEF108=ultimo +DEF109=largo +DEF110=bastante +DEF111=haces +DEF112=muchos +DEF113=aquellos +DEF114=aquellas +DEF115=sus +DEF116=entonces +DEF117=tiempo +DEF118=verdad +DEF119=verdadero +DEF120=verdadera +DEF121=cierto +DEF122=ciertos +DEF123=cierta +DEF124=ciertas +DEF125=intentar +DEF126=intento +DEF127=intenta +DEF128=intentas +DEF129=intentamos +DEF130=intentais +DEF131=intentan +DEF132=dos +DEF133=bajo +DEF134=arriba +DEF135=encima +DEF136=usar +DEF137=uso +DEF138=usas +DEF139=usa +DEF140=usamos +DEF141=usais +DEF142=usan +DEF143=emplear +DEF144=empleo +DEF145=empleas +DEF146=emplean +DEF147=ampleamos +DEF148=empleais +DEF149=valor +DEF150=muy +DEF151=era +DEF152=eras +DEF153=eramos +DEF154=eran +DEF155=modo +DEF156=bien +DEF157=cual +DEF158=cuando +DEF159=donde +DEF160=mientras +DEF161=quien +DEF162=con +DEF163=entre +DEF164=sin +DEF165=trabajo +DEF166=trabajar +DEF167=trabajas +DEF168=trabaja +DEF169=trabajamos +DEF170=trabajais +DEF171=trabajan +DEF172=podria +DEF173=podrias +DEF174=podriamos +DEF175=podrian +DEF176=podriais +DEF177=yo +DEF178=aquel DEF179=qu \ No newline at end of file diff -Nru 
scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/nwSearchFnt.js scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/nwSearchFnt.js --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/nwSearchFnt.js 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/nwSearchFnt.js 2023-03-21 16:17:04.000000000 +0000 @@ -1,513 +1,513 @@ -/*---------------------------------------------------------------------------- - * JavaScript for webhelp search - *---------------------------------------------------------------------------- - This file is part of the webhelpsearch plugin for DocBook WebHelp - Copyright (c) 2007-2008 NexWave Solutions All Rights Reserved. - www.nexwave.biz Nadege Quaine - http://kasunbg.blogspot.com/ Kasun Gajasinghe - */ - -//string initialization -var htmlfileList = "htmlFileList.js"; -var htmlfileinfoList = "htmlFileInfoList.js"; -var useCJKTokenizing = false; - -/* Cette fonction verifie la validite de la recherche entrre par l utilisateur */ -function Verifie(ditaSearch_Form) { - - // Check browser compatibitily - if (navigator.userAgent.indexOf("Konquerer") > -1) { - - alert(txt_browser_not_supported); - return; - } - - - var expressionInput = document.ditaSearch_Form.textToSearch.value; - //Set a cookie to store the searched keywords - $.cookie('textToSearch', expressionInput); - - - if (expressionInput.length < 1) { - - // expression is invalid - alert(txt_enter_at_least_1_char); - // reactive la fenetre de search (utile car cadres) - document.ditaSearch_Form.textToSearch.focus(); - } - else { - - // Effectuer la recherche - Effectuer_recherche(expressionInput); - - // reactive la fenetre de search (utile car cadres) - document.ditaSearch_Form.textToSearch.focus(); - } -} - -var stemQueryMap = new Array(); // A hashtable which maps stems to query words - -/* This function parses the search expression, loads the indices and displays the results*/ -function Effectuer_recherche(expressionInput) { - - /* Display a waiting message */ - //DisplayWaitingMessage(); - - /*data initialisation*/ - var searchFor = ""; // expression en lowercase et sans les caracte res speciaux - //w = new Object(); // hashtable, key=word, value = list of the index of the html files - scriptLetterTab = new Scriptfirstchar(); // Array containing the first letter of each word to look for - var wordsList = new Array(); // Array with the words to look for - var finalWordsList = new Array(); // Array with the words to look for after removing spaces - var linkTab = new Array(); - var fileAndWordList = new Array(); - var txt_wordsnotfound = ""; - - - /*nqu: expressionInput, la recherche est lower cased, plus remplacement des char speciaux*/ - searchFor = expressionInput.toLowerCase().replace(/<\//g, "_st_").replace(/\$_/g, "_di_").replace(/\.|%2C|%3B|%21|%3A|@|\/|\*/g, " ").replace(/(%20)+/g, " ").replace(/_st_/g, "= 0; i--) { - if (fileAndWordList[i] != undefined) { - linkTab.push("

                " + txt_results_for + " " + "" + fileAndWordList[i][0].motslisteDisplay + "" + "

                "); - - linkTab.push("
                  "); - for (t in fileAndWordList[i]) { - //DEBUG: alert(": "+ fileAndWordList[i][t].filenb+" " +fileAndWordList[i][t].motsliste); - //linkTab.push("
                • "+fl[fileAndWordList[i][t].filenb]+"
                • "); - var tempInfo = fil[fileAndWordList[i][t].filenb]; - var pos1 = tempInfo.indexOf("@@@"); - var pos2 = tempInfo.lastIndexOf("@@@"); - var tempPath = tempInfo.substring(0, pos1); - var tempTitle = tempInfo.substring(pos1 + 3, pos2); - var tempShortdesc = tempInfo.substring(pos2 + 3, tempInfo.length); - - //file:///home/kasun/docbook/WEBHELP/webhelp-draft-output-format-idea/src/main/resources/web/webhelp/installation.html - var linkString = "
                • " + tempTitle + ""; - // var linkString = "
                • " + tempTitle + ""; - if ((tempShortdesc != "null")) { - linkString += "\n
                  " + tempShortdesc + "
                  "; - } - linkString += "
                • "; - linkTab.push(linkString); - } - linkTab.push("
                "); - } - } - } - - var results = ""; - if (linkTab.length > 0) { - /*writeln ("

                " + txt_results_for + " " + "" + cleanwordsList + "" + "
                "+"

                ");*/ - results = "

                "; - //write("

                  "); - for (t in linkTab) { - results += linkTab[t].toString(); - } - results += "

                  "; - } else { - results = "

                  " + "Your search returned no results for " + "" + txt_wordsnotfound + "" + "

                  "; - } - //alert(results); - document.getElementById('searchResults').innerHTML = results; -} - -function tokenize(wordsList){ - var stemmedWordsList = new Array(); // Array with the words to look for after removing spaces - var cleanwordsList = new Array(); // Array with the words to look for - for(var j in wordsList){ - var word = wordsList[j]; - if(typeof stemmer != "undefined" ){ - stemQueryMap[stemmer(word)] = word; - } else { - stemQueryMap[word] = word; - } - } - //stemmedWordsList is the stemmed list of words separated by spaces. - for (var t in wordsList) { - wordsList[t] = wordsList[t].replace(/(%22)|^-/g, ""); - if (wordsList[t] != "%20") { - scriptLetterTab.add(wordsList[t].charAt(0)); - cleanwordsList.push(wordsList[t]); - } - } - - if(typeof stemmer != "undefined" ){ - //Do the stemming using Porter's stemming algorithm - for (var i = 0; i < cleanwordsList.length; i++) { - var stemWord = stemmer(cleanwordsList[i]); - stemmedWordsList.push(stemWord); - } - } else { - stemmedWordsList = cleanwordsList; - } - return stemmedWordsList; -} - -//Invoker of CJKTokenizer class methods. -function cjkTokenize(wordsList){ - var allTokens= new Array(); - var notCJKTokens= new Array(); - var j=0; - for(j=0;j"; - return this.input.substring(this.offset,this.offset+2); - } - - function getAllTokens(){ - while(this.incrementToken()){ - var tmp = this.tokenize(); - this.tokens.push(tmp); - } - return this.unique(this.tokens); -// document.getElementById("content").innerHTML += tokens+" "; -// document.getElementById("content").innerHTML += "
                  dada"+sortedTokens+" "; -// console.log(tokens.length+"dsdsds"); - /*for(i=0;i t2.length) { - return 1; - } else { - return -1; - } - //return t1.length - t2.length); +/*---------------------------------------------------------------------------- + * JavaScript for webhelp search + *---------------------------------------------------------------------------- + This file is part of the webhelpsearch plugin for DocBook WebHelp + Copyright (c) 2007-2008 NexWave Solutions All Rights Reserved. + www.nexwave.biz Nadege Quaine + http://kasunbg.blogspot.com/ Kasun Gajasinghe + */ + +//string initialization +var htmlfileList = "htmlFileList.js"; +var htmlfileinfoList = "htmlFileInfoList.js"; +var useCJKTokenizing = false; + +/* Cette fonction verifie la validite de la recherche entrre par l utilisateur */ +function Verifie(ditaSearch_Form) { + + // Check browser compatibitily + if (navigator.userAgent.indexOf("Konquerer") > -1) { + + alert(txt_browser_not_supported); + return; + } + + + var expressionInput = document.ditaSearch_Form.textToSearch.value; + //Set a cookie to store the searched keywords + $.cookie('textToSearch', expressionInput); + + + if (expressionInput.length < 1) { + + // expression is invalid + alert(txt_enter_at_least_1_char); + // reactive la fenetre de search (utile car cadres) + document.ditaSearch_Form.textToSearch.focus(); + } + else { + + // Effectuer la recherche + Effectuer_recherche(expressionInput); + + // reactive la fenetre de search (utile car cadres) + document.ditaSearch_Form.textToSearch.focus(); + } +} + +var stemQueryMap = new Array(); // A hashtable which maps stems to query words + +/* This function parses the search expression, loads the indices and displays the results*/ +function Effectuer_recherche(expressionInput) { + + /* Display a waiting message */ + //DisplayWaitingMessage(); + + /*data initialisation*/ + var searchFor = ""; // expression en lowercase et sans les caracte res speciaux + //w = new Object(); // hashtable, key=word, value = list of the index of the html files + scriptLetterTab = new Scriptfirstchar(); // Array containing the first letter of each word to look for + var wordsList = new Array(); // Array with the words to look for + var finalWordsList = new Array(); // Array with the words to look for after removing spaces + var linkTab = new Array(); + var fileAndWordList = new Array(); + var txt_wordsnotfound = ""; + + + /*nqu: expressionInput, la recherche est lower cased, plus remplacement des char speciaux*/ + searchFor = expressionInput.toLowerCase().replace(/<\//g, "_st_").replace(/\$_/g, "_di_").replace(/\.|%2C|%3B|%21|%3A|@|\/|\*/g, " ").replace(/(%20)+/g, " ").replace(/_st_/g, "= 0; i--) { + if (fileAndWordList[i] != undefined) { + linkTab.push("

                  " + txt_results_for + " " + "" + fileAndWordList[i][0].motslisteDisplay + "" + "

                  "); + + linkTab.push("
                    "); + for (t in fileAndWordList[i]) { + //DEBUG: alert(": "+ fileAndWordList[i][t].filenb+" " +fileAndWordList[i][t].motsliste); + //linkTab.push("
                  • "+fl[fileAndWordList[i][t].filenb]+"
                  • "); + var tempInfo = fil[fileAndWordList[i][t].filenb]; + var pos1 = tempInfo.indexOf("@@@"); + var pos2 = tempInfo.lastIndexOf("@@@"); + var tempPath = tempInfo.substring(0, pos1); + var tempTitle = tempInfo.substring(pos1 + 3, pos2); + var tempShortdesc = tempInfo.substring(pos2 + 3, tempInfo.length); + + //file:///home/kasun/docbook/WEBHELP/webhelp-draft-output-format-idea/src/main/resources/web/webhelp/installation.html + var linkString = "
                  • " + tempTitle + ""; + // var linkString = "
                  • " + tempTitle + ""; + if ((tempShortdesc != "null")) { + linkString += "\n
                    " + tempShortdesc + "
                    "; + } + linkString += "
                  • "; + linkTab.push(linkString); + } + linkTab.push("
                  "); + } + } + } + + var results = ""; + if (linkTab.length > 0) { + /*writeln ("

                  " + txt_results_for + " " + "" + cleanwordsList + "" + "
                  "+"

                  ");*/ + results = "

                  "; + //write("

                    "); + for (t in linkTab) { + results += linkTab[t].toString(); + } + results += "

                    "; + } else { + results = "

                    " + "Your search returned no results for " + "" + txt_wordsnotfound + "" + "

                    "; + } + //alert(results); + document.getElementById('searchResults').innerHTML = results; +} + +function tokenize(wordsList){ + var stemmedWordsList = new Array(); // Array with the words to look for after removing spaces + var cleanwordsList = new Array(); // Array with the words to look for + for(var j in wordsList){ + var word = wordsList[j]; + if(typeof stemmer != "undefined" ){ + stemQueryMap[stemmer(word)] = word; + } else { + stemQueryMap[word] = word; + } + } + //stemmedWordsList is the stemmed list of words separated by spaces. + for (var t in wordsList) { + wordsList[t] = wordsList[t].replace(/(%22)|^-/g, ""); + if (wordsList[t] != "%20") { + scriptLetterTab.add(wordsList[t].charAt(0)); + cleanwordsList.push(wordsList[t]); + } + } + + if(typeof stemmer != "undefined" ){ + //Do the stemming using Porter's stemming algorithm + for (var i = 0; i < cleanwordsList.length; i++) { + var stemWord = stemmer(cleanwordsList[i]); + stemmedWordsList.push(stemWord); + } + } else { + stemmedWordsList = cleanwordsList; + } + return stemmedWordsList; +} + +//Invoker of CJKTokenizer class methods. +function cjkTokenize(wordsList){ + var allTokens= new Array(); + var notCJKTokens= new Array(); + var j=0; + for(j=0;j"; + return this.input.substring(this.offset,this.offset+2); + } + + function getAllTokens(){ + while(this.incrementToken()){ + var tmp = this.tokenize(); + this.tokens.push(tmp); + } + return this.unique(this.tokens); +// document.getElementById("content").innerHTML += tokens+" "; +// document.getElementById("content").innerHTML += "
                    dada"+sortedTokens+" "; +// console.log(tokens.length+"dsdsds"); + /*for(i=0;i t2.length) { + return 1; + } else { + return -1; + } + //return t1.length - t2.length); } \ No newline at end of file diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/punctuation.props scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/punctuation.props --- scons-4.4.0+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/punctuation.props 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docbook-xsl-1.76.1/webhelp/template/content/search/punctuation.props 2023-03-21 16:17:04.000000000 +0000 @@ -1,31 +1,31 @@ -Punct01=\\u3002 -Punct02=\\u3003 -Punct03=\\u300C -Punct04=\\u300D -Punct05=\\u300E -Punct06=\\u300F -Punct07=\\u301D -Punct08=\\u301E -Punct09=\\u301F -Punct10=\\u309B -Punct11=\\u2018 -Punct12=\\u2019 -Punct13=\\u201A -Punct14=\\u201C -Punct15=\\u201D -Punct16=\\u201E -Punct17=\\u2032 -Punct18=\\u2033 -Punct19=\\u2035 -Punct20=\\u2039 -Punct21=\\u203A -Punct22=\\u201E -Punct23=\\u00BB -Punct24=\\u00AB -Punct25= -Punct26= -Punct27=\\u00A0 -Punct28=\\u2014 - - - +Punct01=\\u3002 +Punct02=\\u3003 +Punct03=\\u300C +Punct04=\\u300D +Punct05=\\u300E +Punct06=\\u300F +Punct07=\\u301D +Punct08=\\u301E +Punct09=\\u301F +Punct10=\\u309B +Punct11=\\u2018 +Punct12=\\u2019 +Punct13=\\u201A +Punct14=\\u201C +Punct15=\\u201D +Punct16=\\u201E +Punct17=\\u2032 +Punct18=\\u2033 +Punct19=\\u2035 +Punct20=\\u2039 +Punct21=\\u203A +Punct22=\\u201E +Punct23=\\u00BB +Punct24=\\u00AB +Punct25= +Punct26= +Punct27=\\u00A0 +Punct28=\\u2014 + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docs/html.xsl scons-4.5.2+dfsg/SCons/Tool/docbook/docs/html.xsl --- scons-4.4.0+dfsg/SCons/Tool/docbook/docs/html.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docs/html.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,55 +1,55 @@ - - - - - - - - - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - + + + + + + + + + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/docbook/docs/pdf.xsl scons-4.5.2+dfsg/SCons/Tool/docbook/docs/pdf.xsl --- scons-4.4.0+dfsg/SCons/Tool/docbook/docs/pdf.xsl 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/docs/pdf.xsl 2023-03-21 16:17:04.000000000 +0000 @@ -1,62 +1,62 @@ - - - - - - - - - - -0pt - - -/appendix toc,title -article/appendix nop -/article toc,title -book toc,title,figure,table,example,equation -/chapter toc,title -part toc,title -/preface toc,title -reference toc,title -/sect1 toc -/sect2 toc -/sect3 toc -/sect4 toc -/sect5 toc -/section toc -set toc,title - - - - - - - - + + + + + + + + + + +0pt + + +/appendix toc,title +article/appendix nop +/article toc,title +book toc,title,figure,table,example,equation +/chapter toc,title +part toc,title +/preface toc,title +reference toc,title +/sect1 toc +/sect2 toc +/sect3 toc +/sect4 toc +/sect5 toc +/section toc +set toc,title + + + + + + + + diff -Nru 
scons-4.4.0+dfsg/SCons/Tool/docbook/__init__.py scons-4.5.2+dfsg/SCons/Tool/docbook/__init__.py --- scons-4.4.0+dfsg/SCons/Tool/docbook/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/docbook/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -69,7 +69,7 @@ # lxml etree XSLT global max traversal depth # -lmxl_xslt_global_max_depth = 3100 +lmxl_xslt_global_max_depth = 3600 if has_lxml and lmxl_xslt_global_max_depth: def __lxml_xslt_set_global_max_depth(max_depth): diff -Nru scons-4.4.0+dfsg/SCons/Tool/fortran.xml scons-4.5.2+dfsg/SCons/Tool/fortran.xml --- scons-4.4.0+dfsg/SCons/Tool/fortran.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/fortran.xml 2023-03-21 16:17:04.000000000 +0000 @@ -111,9 +111,8 @@ (or similar) include or module search path options that scons generates automatically from &cv-link-FORTRANPATH;. See -&cv-link-_FORTRANINCFLAGS; and &cv-link-_FORTRANMODFLAG;, -below, -for the variables that expand those options. +&cv-link-_FORTRANINCFLAGS; and &cv-link-_FORTRANMODFLAG; +for the &consvars; that expand those options.
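[Editor's note on the fortran.xml hunk above: it documents the flags SCons derives from FORTRANPATH via _FORTRANINCFLAGS and _FORTRANMODFLAG. A minimal SConstruct sketch of how that is typically driven; the gfortran tool and the 'modules' directory are assumptions for illustration only.]

# SConstruct sketch: FORTRANPATH entries are expanded into include/module
# search options through _FORTRANINCFLAGS and _FORTRANMODFLAG.
env = Environment(tools=['default', 'gfortran'], FORTRANPATH=['modules'])
env.Program('demo', ['demo.f90'])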
  • @@ -123,8 +122,9 @@ General user-specified options that are passed to the Fortran compiler. Similar to &cv-link-FORTRANFLAGS;, -but this variable is applied to all dialects. +but this &consvar; is applied to all dialects. +New in version 4.4.
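[Editor's note: the hunk above describes FORTRANCOMMONFLAGS, new in SCons 4.4, which unlike the per-dialect FORTRANFLAGS is applied to every Fortran dialect. A short sketch, assuming gfortran and mixed-dialect sources; the -g flag is just an illustrative option.]

# SConstruct sketch: FORTRANCOMMONFLAGS is added to FORTRAN, F77, F90,
# F95, F03 and F08 compilations alike.
env = Environment(tools=['default', 'gfortran'])
env.Append(FORTRANCOMMONFLAGS=['-g'])
env.Program('mixed', ['legacy.f', 'modern.f90'])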
    diff -Nru scons-4.4.0+dfsg/SCons/Tool/gfortran.py scons-4.5.2+dfsg/SCons/Tool/gfortran.py --- scons-4.4.0+dfsg/SCons/Tool/gfortran.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/gfortran.py 2023-03-21 16:17:04.000000000 +0000 @@ -29,24 +29,27 @@ selection method. """ -import SCons.Util +from SCons.Util import CLVar from . import fortran def generate(env): - """Add Builders and construction variables for gfortran to an - Environment.""" + """Add Builders and construction variables for gfortran.""" fortran.generate(env) - for dialect in ['F77', 'F90', 'FORTRAN', 'F95', 'F03', 'F08']: - env[f'{dialect}'] = 'gfortran' - env[f'SH{dialect}'] = f'${dialect}' - if env['PLATFORM'] in ['cygwin', 'win32']: - env[f'SH{dialect}FLAGS'] = SCons.Util.CLVar(f'${dialect}FLAGS') - else: - env[f'SH{dialect}FLAGS'] = SCons.Util.CLVar(f'${dialect}FLAGS -fPIC') - + # fill in other dialects (FORTRAN dialect set by fortran.generate(), + # but don't overwrite if they have been set manually. + for dialect in ['F77', 'F90', 'F95', 'F03', 'F08']: + if dialect not in env: + env[f'{dialect}'] = 'gfortran' + if f'SH{dialect}' not in env: + env[f'SH{dialect}'] = f'${dialect}' + + # The fortran module always sets the shlib FLAGS, but does not + # include -fPIC, which is needed for the GNU tools. Rewrite if needed. + if env['PLATFORM'] not in ['cygwin', 'win32']: + env[f'SH{dialect}FLAGS'] = CLVar(f'${dialect}FLAGS -fPIC') env[f'INC{dialect}PREFIX'] = "-I" env[f'INC{dialect}SUFFIX'] = "" diff -Nru scons-4.4.0+dfsg/SCons/Tool/jar.xml scons-4.5.2+dfsg/SCons/Tool/jar.xml --- scons-4.4.0+dfsg/SCons/Tool/jar.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/jar.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ -Sets construction variables for the Microsoft Visual C/C++ compiler. +Sets &consvars; for the Microsoft Visual C/C++ compiler. @@ -96,7 +75,17 @@ PCH PCHSTOP PDB +MSVC_VERSION +MSVC_USE_SCRIPT +MSVC_USE_SCRIPT_ARGS +MSVC_USE_SETTINGS MSVC_NOTFOUND_POLICY +MSVC_SCRIPTERROR_POLICY +MSVC_SCRIPT_ARGS +MSVC_SDK_VERSION +MSVC_TOOLSET_VERSION +MSVC_SPECTRE_LIBS + @@ -110,7 +99,7 @@ This builder is only provided when Microsoft Visual C++ is being used as the compiler. The &b-PCH; builder is generally used in -conjunction with the &cv-link-PCH; construction variable to force object files to use +conjunction with the &cv-link-PCH; &consvar; to force object files to use the precompiled header: @@ -148,7 +137,7 @@ to support building with precompiled headers. The default value expands expands to the appropriate Microsoft Visual C++ command-line options -when the &cv-link-PCH; construction variable is set. +when the &cv-link-PCH; &consvar; is set.
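[Editor's note: the PCH builder mentioned in the msvc.xml hunk above is only provided with the Microsoft toolchain. A minimal sketch of the usual pairing of the PCH builder with the PCH and PCHSTOP construction variables; file names are illustrative.]

# SConstruct sketch (MSVC only): build the precompiled header once, then
# let object builds consume it. Sources must #include "StdAfx.h" first.
env = Environment(tools=['msvc', 'mslink'])
env['PCHSTOP'] = 'StdAfx.h'
env['PCH'] = env.PCH('StdAfx.cpp')[0]
env.Program('hello', ['hello.cpp'])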
    @@ -161,7 +150,7 @@ Microsoft Visual C++ PDB file. The default value expands expands to appropriate Microsoft Visual C++ command-line options -when the &cv-link-PDB; construction variable is set. +when the &cv-link-PDB; &consvar; is set. @@ -208,11 +197,11 @@ when calling the Microsoft Visual C/C++ compiler. All compilations of source files from the same source directory that generate target files in a same output directory -and were configured in SCons using the same construction environment +and were configured in SCons using the same &consenv; will be built in a single call to the compiler. Only source files that have changed since their object files were built will be passed to each compiler invocation -(via the &cv-link-CHANGED_SOURCES; construction variable). +(via the &cv-link-CHANGED_SOURCES; &consvar;). Any compilations where the object (target) file base name (minus the .obj) does not match the source file base name @@ -261,9 +250,9 @@ -A construction variable that, when expanded, +A &consvar; that, when expanded, adds the flag to the command line -only if the &cv-link-PDB; construction variable is set. +only if the &cv-link-PDB; &consvar; is set. @@ -324,7 +313,7 @@ -An automatically-generated construction variable +An automatically-generated &consvar; containing the command-line options for specifying directories to be searched by the resource compiler. @@ -343,7 +332,7 @@ The prefix (flag) used to specify an include directory on the resource compiler command line. This will be prepended to the beginning of each directory -in the &cv-link-CPPPATH; construction variable +in the &cv-link-CPPPATH; &consvar; when the &cv-link-RCINCFLAGS; variable is expanded. @@ -355,7 +344,7 @@ The suffix used to specify an include directory on the resource compiler command line. This will be appended to the end of each directory -in the &cv-link-CPPPATH; construction variable +in the &cv-link-CPPPATH; &consvar; when the &cv-link-RCINCFLAGS; variable is expanded.
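[Editor's note: two of the construction variables touched in the hunks above, PDB and MSVC_BATCH, are commonly set together. A hedged sketch; the target and source names are placeholders.]

# SConstruct sketch: setting PDB adds the debug-database options to the
# compile and link lines; MSVC_BATCH groups same-directory compilations
# into a single cl invocation.
env = Environment(tools=['msvc', 'mslink'], MSVC_BATCH=True)
env['PDB'] = 'app.pdb'
env.Program('app', ['main.c', 'util.c'])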
    @@ -365,12 +354,10 @@ Sets the preferred version of Microsoft Visual C/C++ to use. - - - +If the specified version is unavailable (not installed, +or not discoverable), tool initialization will fail. If &cv-MSVC_VERSION; is not set, SCons will (by default) select the -latest version of Visual C/C++ installed on your system. If the -specified version isn't installed, tool initialization will fail. +latest version of Visual C/C++ installed on your system. @@ -383,28 +370,186 @@ -Valid values for Windows are -14.3, -14.2, -14.1, -14.1Exp, -14.0, -14.0Exp, -12.0, -12.0Exp, -11.0, -11.0Exp, -10.0, -10.0Exp, -9.0, -9.0Exp, -8.0, -8.0Exp, -7.1, -7.0, -and 6.0. -Versions ending in Exp refer to "Express" or -"Express for Desktop" editions. +The valid values for &cv-MSVC_VERSION; represent major versions +of the compiler, except that versions ending in Exp +refer to "Express" or "Express for Desktop" Visual Studio editions, +which require distict entries because they use a different +filesystem layout and have some feature limitations compared to +the full version. +The following table shows correspondence +of the selector string to various version indicators +('x' is used as a placeholder for +a single digit that can vary). +Note that it is not necessary to install Visual Studio +to build with &SCons; (for example, you can install only +Build Tools), but if Visual Studio is installed, +additional builders such as &b-link-MSVSSolution; and +&b-link-MSVSProject; become avaialable and will +correspond to the indicated versions. + + + + + + + + + + + + SCons Key + MSVC++ Version + _MSVC_VER + VS Product + MSBuild/VS Version + + + + + 14.3 + 14.3x + 193x + Visual Studio 2022 + 17.x + + + 14.2 + 14.2x + 192x + Visual Studio 2019 + 16.x, 16.1x + + + 14.1 + 14.1 or 14.1x + 191x + Visual Studio 2017 + 15.x + + + 14.1Exp + 14.1 + 1910 + Visual Studio 2017 Express + 15.0 + + + 14.0 + 14.0 + 1900 + Visual Studio 2015 + 14.0 + + + 14.0Exp + 14.0 + 1900 + Visual Studio 2015 Express + 14.0 + + + 12.0 + 12.0 + 1800 + Visual Studio 2013 + 12.0 + + + 12.0Exp + 12.0 + 1800 + Visual Studio 2013 Express + 12.0 + + + 11.0 + 11.0 + 1700 + Visual Studio 2012 + 11.0 + + + 11.0Exp + 11.0 + 1700 + Visual Studio 2012 Express + 11.0 + + + 10.0 + 10.0 + 1600 + Visual Studio 2010 + 10.0 + + + 10.0Exp + 10.0 + 1600 + Visual C++ Express 2010 + 10.0 + + + 9.0 + 9.0 + 1500 + Visual Studio 2008 + 9.0 + + + 9.0Exp + 9.0 + 1500 + Visual C++ Express 2008 + 9.0 + + + 8.0 + 8.0 + 1400 + Visual Studio 2005 + 8.0 + + + 8.0Exp + 8.0 + 1400 + Visual C++ Express 2005 + 8.0 + + + 7.1 + 7.1 + 1300 + Visual Studio .NET 2003 + 7.1 + + + 7.0 + 7.0 + 1200 + Visual Studio .NET 2002 + 7.0 + + + 6.0 + 6.0 + 1100 + Visual Studio 6.0 + 6.0 + + + + + + +The compilation environment can be further or more precisely specified through the +use of several other &consvars;: see the descriptions of +&cv-link-MSVC_TOOLSET_VERSION;, +&cv-link-MSVC_SDK_VERSION;, +&cv-link-MSVC_USE_SCRIPT;, +&cv-link-MSVC_USE_SCRIPT_ARGS;, +and &cv-link-MSVC_USE_SETTINGS;. @@ -433,7 +578,7 @@ Setting &cv-MSVC_USE_SCRIPT; to None bypasses the Visual Studio autodetection entirely; -use this if you are running SCons in a Visual Studio cmd +use this if you are running &SCons; in a Visual Studio cmd window and importing the shell's environment variables - that is, if you are sure everything is set correctly already and you don't want &SCons; to change anything. @@ -441,6 +586,12 @@ &cv-MSVC_USE_SCRIPT; ignores &cv-link-MSVC_VERSION; and &cv-link-TARGET_ARCH;. 
+ +Changed in version 4.4: +new &cv-link-MSVC_USE_SCRIPT_ARGS; provides a +way to pass arguments. + +
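[Editor's note: the selector keys from the table above are the values accepted by MSVC_VERSION, while MSVC_USE_SCRIPT together with MSVC_USE_SCRIPT_ARGS (4.4 and later) bypasses autodetection. A sketch of both styles; the batch-file path and the x64 argument are placeholders, not values taken from this diff.]

# SConstruct sketch: pin a specific toolset by its SCons key ...
env = Environment(MSVC_VERSION='14.3', TARGET_ARCH='x86_64')

# ... or hand environment setup to a vendor batch file instead:
# env = Environment(
#     MSVC_USE_SCRIPT=r'C:\BuildTools\VC\Auxiliary\Build\vcvarsall.bat',
#     MSVC_USE_SCRIPT_ARGS='x64',
# )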
    @@ -449,6 +600,9 @@ Provides arguments passed to the script &cv-link-MSVC_USE_SCRIPT;. + +New in version 4.4 + @@ -529,11 +683,15 @@ The burden is on the user to ensure the dictionary contents are minimally sufficient to ensure successful builds. -
    + + + +New in version 4.4 + @@ -780,6 +938,8 @@ subject to the conditions listed above. The default &scons; behavior may change in the future. +New in version 4.4 + @@ -831,6 +991,9 @@ Suppress msvc batch file error messages. + +New in version 4.4 + @@ -905,6 +1068,8 @@ +New in version 4.4 + @@ -916,8 +1081,8 @@ &cv-MSVC_SCRIPT_ARGS; is available for msvc batch file arguments that do not have first-class support -via construction variables or when there is an issue with the appropriate construction variable validation. -When available, it is recommended to use the appropriate construction variables (e.g., &cv-link-MSVC_TOOLSET_VERSION;) +via &consvars; or when there is an issue with the appropriate &consvar; validation. +When available, it is recommended to use the appropriate &consvars; (e.g., &cv-link-MSVC_TOOLSET_VERSION;) rather than &cv-MSVC_SCRIPT_ARGS; arguments. @@ -1041,6 +1206,8 @@ +New in version 4.4 + @@ -1159,6 +1326,8 @@ +New in version 4.4 + @@ -1329,6 +1498,8 @@ +New in version 4.4 + @@ -1409,6 +1580,8 @@ +New in version 4.4 + diff -Nru scons-4.4.0+dfsg/SCons/Tool/msvs.xml scons-4.5.2+dfsg/SCons/Tool/msvs.xml --- scons-4.4.0+dfsg/SCons/Tool/msvs.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/msvs.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,10 +1,12 @@ + %scons; @@ -20,7 +22,7 @@ - Sets construction variables for Microsoft Visual Studio. + Sets &consvars; for Microsoft Visual Studio. MSVSPROJECTCOM @@ -39,70 +41,106 @@ - Builds a Microsoft Visual Studio project file, and by default - builds a solution file as well. - - - This builds a Visual Studio project file, based on the - version of Visual Studio that is configured (either the - latest installed version, or the version specified by - &cv-link-MSVS_VERSION; in the Environment constructor). For - Visual Studio 6, it will generate a .dsp - file. For Visual Studio 7, 8, and 9, it will - generate a .vcproj file. For Visual - Studio 10 and later, it will generate a - .vcxproj file. - - - By default, this also generates a solution file for the - specified project, a .dsw file for - Visual Studio 6 or a .sln file for - Visual Studio 7 and later. This behavior may be disabled by - specifying auto_build_solution=0 when you - call &b-MSVSProject;, in which case you presumably want to - build the solution file(s) by calling the &b-MSVSSolution; - Builder (see below). - - - The &b-MSVSProject; builder takes several lists of filenames - to be placed into the project file. These are currently - limited to srcs, incs, - localincs, resources, and - misc. These are pretty self-explanatory, - but it should be noted that these lists are added to the - &cv-link-SOURCES; construction variable as strings, NOT as - SCons File Nodes. This is because they represent file names - to be added to the project file, not the source files used - to build the project file. + Build a Microsoft Visual C++ project file and solution file. - The above filename lists are all optional, although at least - one must be specified for the resulting project file to + Builds a C++ project file based on the + version of Visual Studio (or to be more precise, of MSBuild) + that is configured: either the latest installed version, + or the version specified by + &cv-link-MSVC_VERSION; in the current &consenv;. + For Visual Studio 6.0 a .dsp file is generated. + For Visual Studio versions 2002-2008, + a .vcproj file is generated. + For Visual Studio 2010 and later a .vcxproj + file is generated. 
+ Note there are multiple versioning schemes involved in + the Microsoft compilation environment - + see the description of &cv-link-MSVC_VERSION; for equivalences. + &SCons; does not know how to construct project files for + other languages (such as .csproj for C#, + .vbproj for Visual Basic or + .pyproject for Python)). + + + For the .vcxproj file, the underlying + format is the MSBuild XML Schema, and the details conform to: + + https://learn.microsoft.com/en-us/cpp/build/reference/vcxproj-file-structure. + The generated solution file enables Visual Studio to + understand the project structure, and allows building it + using MSBuild to call back to &SCons;. + The project file encodes a toolset version that has been + selected by &SCons; as described above. Since recent Visual + Studio versions support multiple concurrent toolsets, + use &cv-link-MSVC_VERSION; to select the desired one if + it does not match the &SCons; default. + The project file also includes entries which describe + how to call &SCons; to build the project from within Visual Studio + (or from an MSBuild command line). + In some situations &SCons; may generate this incorrectly - + notably when using the scons-local + distribution, which is not installed in a way that that + matches the default invocation line. + If so, the &cv-link-SCONS_HOME; &consvar; can be used to describe + the right way to locate the &SCons; code so that it can be imported. + + + By default, a matching solution file for the project is also generated. + This behavior may be disabled by + specifying auto_build_solution=0 + to the &b-MSVSProject; builder. + The solution file can also be independently + generated by calling the &b-MSVSSolution; builder, + such as in the case where a solution should describe + multiple projects. + See the &b-link-MSVSSolution; description for further information. + + + The &b-MSVSProject; builder accepts several keyword arguments + describing lists of filenames to be placed into the project file. + Currently, + srcs, + incs, + localincs, + resources, + and misc + are recognized. + The names are intended to be self-explanatory, but note that the + filenames need to be specified as strings, not + as &SCons; File Nodes + (for example if you generate files for inclusion by using the + &f-link-Glob; function, the results should be converted to + a list of strings before passing them to &b-MSVSProject;). + This is because Visual Studio and MSBuild know nothing about &SCons; + Node types. + Each of the filename lists are individually optional, but at + least one list must be specified for the resulting project file to be non-empty. In addition to the above lists of values, the following values - may be specified: + may be specified as keyword arguments: - target + target The name of the target .dsp or .vcproj file. The correct suffix for the version of Visual Studio must be used, but the &cv-link-MSVSPROJECTSUFFIX; - construction variable will be defined to the correct + &consvar; will be defined to the correct value (see example below). - variant + variant - The name of this particular variant. For Visual Studio 7 + The name of this particular variant. Except for Visual Studio 6 projects, this can also be a list of variant names. These are typically things like "Debug" or "Release", but really can be anything you want. For Visual Studio @@ -117,145 +155,184 @@ - cmdargs + cmdargs Additional command line arguments for the different variants. 
The number of - cmdargs entries must match the number - of variant entries, or be empty (not + cmdargs entries must match the number + of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. - cppdefines + cppdefines Preprocessor definitions for the different variants. - The number of cppdefines entries - must match the number of variant + The number of cppdefines entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all - variants. If you don't give this parameter, SCons + variants. If you don't give this parameter, &SCons; will use the invoking environment's - CPPDEFINES entry for all variants. + &cv-link-CPPDEFINES; entry for all variants. - cppflags + cppflags Compiler flags for the different variants. - If a /std:c++ flag is found then /Zc:__cplusplus is - appended to the flags if not already found, this - ensures that intellisense uses the /std:c++ switch. - The number of cppflags entries - must match the number of variant + If a flag is found then + is appended to the + flags if not already found, this ensures that Intellisense + uses the switch. + The number of cppflags entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. If you don't give this parameter, SCons will combine the invoking environment's - CCFLAGS, CXXFLAGS, - CPPFLAGS entries for all variants. + &cv-link-CCFLAGS;, &cv-link-CXXFLAGS;, + &cv-link-CPPFLAGS; entries for all variants. - cpppaths + cpppaths Compiler include paths for the different variants. - The number of cpppaths entries - must match the number of variant + The number of cpppaths entries + must match the number of variant entries, or be empty (not specified). If you give only one, it will automatically be propagated to all variants. If you don't give this parameter, SCons will use the invoking environment's - CPPPATH entry for all variants. + &cv-link-CPPPATH; entry for all variants. - buildtarget + buildtarget An optional string, node, or list of strings or nodes (one per build variant), to tell the Visual Studio debugger what output target to use in what build variant. The number of - buildtarget entries must match the - number of variant entries. + buildtarget entries must match the + number of variant entries. - runfile + runfile The name of the file that Visual Studio 7 and later will run and debug. This appears as the - value of the Output field in the - resulting Visual Studio project file. If this is not + value of the Output field in the + resulting Visual C++ project file. If this is not specified, the default is the same as the specified - buildtarget value. + buildtarget value. + - Note that because &SCons; always executes its build commands + &SCons; and Microsoft Visual Studio understand projects in + different ways, and the mapping is sometimes imperfect: + + + Because &SCons; always executes its build commands from the directory in which the &SConstruct; file is located, if you generate a project file in a different directory - than the &SConstruct; directory, users will not be able to + than the directory of the &SConstruct; file, users will not be able to double-click on the file name in compilation error messages displayed in the Visual Studio console output window. 
This can - be remedied by adding the Visual C/C++ /FC + be remedied by adding the Visual C/C++ compiler option to the &cv-link-CCFLAGS; variable so that the compiler will print the full path name of any files that cause compilation errors. + + If the project file is only used to teach the Visual Studio + project browser about the file layout there should be no issues, + However, Visual Studio should not be used to make changes + to the project structure, build options, etc. as these will + (a) not feed back to the &SCons; description of the project + and (b) be lost if &SCons; regenerates the project file. + The SConscript files should remain the definitive description + of the build. + + + If the project file is used to drive MSBuild (such as selecting + "build" from the Visual Studio interface) you lose the direct + control of target selection and command-line options you would + have if launching the build directly from &SCons;, + because these will be hardcoded in the project file to the + values specified in the &b-MSVSProject; call. + You can regain some of this control by defining multiple variants, + using multiple &b-MSVSProject; calls to arrange different build + targets, arguments, defines, flags and paths for different variants. + + + If the build is divided into a solution with multiple MSBuild + projects the mapping is further strained. In this case, + it is important not to set Visual Studio to do parallel builds, + as it will then launch the separate project builds in parallel, + and &SCons; does not work well if called that way. + Instead you can set up the &SCons; build for parallel building - + see the &f-link-SetOption; function for how to do this with + num_jobs. + + + Example usage: barsrcs = ['bar.cpp'] barincs = ['bar.h'] barlocalincs = ['StdAfx.h'] -barresources = ['bar.rc','resource.h'] +barresources = ['bar.rc', 'resource.h'] barmisc = ['bar_readme.txt'] -dll = env.SharedLibrary(target='bar.dll', - source=barsrcs) +dll = env.SharedLibrary(target='bar.dll', source=barsrcs) buildtarget = [s for s in dll if str(s).endswith('dll')] -env.MSVSProject(target='Bar' + env['MSVSPROJECTSUFFIX'], - srcs=barsrcs, - incs=barincs, - localincs=barlocalincs, - resources=barresources, - misc=barmisc, - buildtarget=buildtarget, - variant='Release') +env.MSVSProject( + target='Bar' + env['MSVSPROJECTSUFFIX'], + srcs=barsrcs, + incs=barincs, + localincs=barlocalincs, + resources=barresources, + misc=barmisc, + buildtarget=buildtarget, + variant='Release', +) - - Starting with version 2.4 of SCons it is - also possible to specify the optional argument - DebugSettings, which creates files - for debugging under Visual Studio: - + - DebugSettings + DebugSettings A dictionary of debug settings that get written to the .vcproj.user or the .vcxproj.user file, depending on the - version installed. As it is done for cmdargs (see above), + version installed. As for cmdargs, you can specify a DebugSettings dictionary per variant. If you give only one, it will be propagated to all variants. + + Changed in version 2.4: + Added the optional DebugSettings parameter. 
+ @@ -279,12 +356,17 @@ # Check command args to force one Microsoft Visual Studio version if msvcver == '9' or msvcver == '11': - env = Environment(MSVC_VERSION=msvcver+'.0', MSVC_BATCH=False) + env = Environment(MSVC_VERSION=msvcver + '.0', MSVC_BATCH=False) else: - env = Environment() + env = Environment() -AddOption('--userfile', action='store_true', dest='userfile', default=False, - help="Create Visual Studio Project user file") +AddOption( + '--userfile', + action='store_true', + dest='userfile', + default=False, + help="Create Visual C++ project file", +) # # 1. Configure your Debug Setting dictionary with options you want in the list @@ -292,28 +374,28 @@ # a specific application for testing your dll with Microsoft Visual Studio 2008 (v9): # V9DebugSettings = { - 'Command':'c:\\myapp\\using\\thisdll.exe', + 'Command': 'c:\\myapp\\using\\thisdll.exe', 'WorkingDirectory': 'c:\\myapp\\using\\', 'CommandArguments': '-p password', -# 'Attach':'false', -# 'DebuggerType':'3', -# 'Remote':'1', -# 'RemoteMachine': None, -# 'RemoteCommand': None, -# 'HttpUrl': None, -# 'PDBPath': None, -# 'SQLDebugging': None, -# 'Environment': '', -# 'EnvironmentMerge':'true', -# 'DebuggerFlavor': None, -# 'MPIRunCommand': None, -# 'MPIRunArguments': None, -# 'MPIRunWorkingDirectory': None, -# 'ApplicationCommand': None, -# 'ApplicationArguments': None, -# 'ShimCommand': None, -# 'MPIAcceptMode': None, -# 'MPIAcceptFilter': None, + # 'Attach':'false', + # 'DebuggerType':'3', + # 'Remote':'1', + # 'RemoteMachine': None, + # 'RemoteCommand': None, + # 'HttpUrl': None, + # 'PDBPath': None, + # 'SQLDebugging': None, + # 'Environment': '', + # 'EnvironmentMerge':'true', + # 'DebuggerFlavor': None, + # 'MPIRunCommand': None, + # 'MPIRunArguments': None, + # 'MPIRunWorkingDirectory': None, + # 'ApplicationCommand': None, + # 'ApplicationArguments': None, + # 'ShimCommand': None, + # 'MPIAcceptMode': None, + # 'MPIAcceptFilter': None, } # @@ -327,28 +409,28 @@ 'LocalDebuggerCommand': 'c:\\myapp\\using\\thisdll.exe', 'LocalDebuggerWorkingDirectory': 'c:\\myapp\\using\\', 'LocalDebuggerCommandArguments': '-p password', -# 'LocalDebuggerEnvironment': None, -# 'DebuggerFlavor': 'WindowsLocalDebugger', -# 'LocalDebuggerAttach': None, -# 'LocalDebuggerDebuggerType': None, -# 'LocalDebuggerMergeEnvironment': None, -# 'LocalDebuggerSQLDebugging': None, -# 'RemoteDebuggerCommand': None, -# 'RemoteDebuggerCommandArguments': None, -# 'RemoteDebuggerWorkingDirectory': None, -# 'RemoteDebuggerServerName': None, -# 'RemoteDebuggerConnection': None, -# 'RemoteDebuggerDebuggerType': None, -# 'RemoteDebuggerAttach': None, -# 'RemoteDebuggerSQLDebugging': None, -# 'DeploymentDirectory': None, -# 'AdditionalFiles': None, -# 'RemoteDebuggerDeployDebugCppRuntime': None, -# 'WebBrowserDebuggerHttpUrl': None, -# 'WebBrowserDebuggerDebuggerType': None, -# 'WebServiceDebuggerHttpUrl': None, -# 'WebServiceDebuggerDebuggerType': None, -# 'WebServiceDebuggerSQLDebugging': None, + # 'LocalDebuggerEnvironment': None, + # 'DebuggerFlavor': 'WindowsLocalDebugger', + # 'LocalDebuggerAttach': None, + # 'LocalDebuggerDebuggerType': None, + # 'LocalDebuggerMergeEnvironment': None, + # 'LocalDebuggerSQLDebugging': None, + # 'RemoteDebuggerCommand': None, + # 'RemoteDebuggerCommandArguments': None, + # 'RemoteDebuggerWorkingDirectory': None, + # 'RemoteDebuggerServerName': None, + # 'RemoteDebuggerConnection': None, + # 'RemoteDebuggerDebuggerType': None, + # 'RemoteDebuggerAttach': None, + # 'RemoteDebuggerSQLDebugging': None, + # 
'DeploymentDirectory': None, + # 'AdditionalFiles': None, + # 'RemoteDebuggerDeployDebugCppRuntime': None, + # 'WebBrowserDebuggerHttpUrl': None, + # 'WebBrowserDebuggerDebuggerType': None, + # 'WebServiceDebuggerHttpUrl': None, + # 'WebServiceDebuggerDebuggerType': None, + # 'WebServiceDebuggerSQLDebugging': None, } # @@ -370,71 +452,85 @@ barsrcs = ['bar.cpp', 'dllmain.cpp', 'stdafx.cpp'] barincs = ['targetver.h'] barlocalincs = ['StdAfx.h'] -barresources = ['bar.rc','resource.h'] +barresources = ['bar.rc', 'resource.h'] barmisc = ['ReadMe.txt'] -dll = env.SharedLibrary(target='bar.dll', - source=barsrcs) +dll = env.SharedLibrary(target='bar.dll', source=barsrcs) -env.MSVSProject(target='Bar' + env['MSVSPROJECTSUFFIX'], - srcs=barsrcs, - incs=barincs, - localincs=barlocalincs, - resources=barresources, - misc=barmisc, - buildtarget=[dll[0]] * 2, - variant=('Debug|Win32', 'Release|Win32'), - cmdargs='vc=%s' % msvcver, - DebugSettings=(dbgSettings, {})) +env.MSVSProject( + target='Bar' + env['MSVSPROJECTSUFFIX'], + srcs=barsrcs, + incs=barincs, + localincs=barlocalincs, + resources=barresources, + misc=barmisc, + buildtarget=[dll[0]] * 2, + variant=('Debug|Win32', 'Release|Win32'), + cmdargs=f'vc={msvcver}', + DebugSettings=(dbgSettings, {}), +) - Builds a Microsoft Visual Studio solution file. + Build a Microsoft Visual Studio Solution file. - This builds a Visual Studio solution file, based on the - version of Visual Studio that is configured (either the + Builds a Visual Studio solution file based on the + version of Visual Studio that is configured: either the latest installed version, or the version specified by - &cv-link-MSVS_VERSION; in the construction environment). For - Visual Studio 6, it will generate a .dsw - file. For Visual Studio 7 (.NET), it will generate a - .sln file. + &cv-link-MSVC_VERSION; in the &consenv;. For + Visual Studio 6, a .dsw file is generated. + For Visual Studio .NET 2002 and later, + it will generate a .sln file. + Note there are multiple versioning schemes involved in + the Microsoft compilation environment - + see the description of &cv-link-MSVC_VERSION; for equivalences. + + + The solution file is a container for one or more projects, + and follows the format described at + + https://learn.microsoft.com/en-us/visualstudio/extensibility/internals/solution-dot-sln-file. The following values must be specified: - target + target - The name of the target .dsw or .sln file. The correct + The name of the target .dsw or + .sln file. The correct suffix for the version of Visual Studio must be used, but the value &cv-link-MSVSSOLUTIONSUFFIX; will be defined to the correct value (see example below). - - variant + + + variant + The name of this particular variant, or a list of variant names (the latter is only supported for MSVS 7 solutions). These are typically things like "Debug" or "Release", but really can be anything you want. For MSVS 7 they may also specify target platform, like this - "Debug|Xbox". Default platform is Win32. + "Debug|Xbox". Default platform is Win32. - - projects + + + projects + A list of project file names, or Project nodes returned - by calls to the &b-MSVSProject; Builder, to be placed - into the solution file. It should be noted that these - file names are NOT added to the $SOURCES environment - variable in form of files, but rather as strings. - This is because they represent file names to be added - to the solution file, not the source files used to - build the solution file. 
+ by calls to the &b-link-MSVSProject; Builder, to be placed + into the solution file. + Note that these filenames need to be specified as strings, + NOT as &SCons; File Nodes. + This is because the solution file will be interpreted by MSBuild + and by Visual Studio, which know nothing about &SCons; Node types. @@ -456,28 +552,39 @@ - VERSION + VERSION + the version of MSVS being used (can be set via - &cv-link-MSVS_VERSION;) + &cv-link-MSVC_VERSION;) - - VERSIONS + + + VERSIONS + the available versions of MSVS installed - - VCINSTALLDIR + + + VCINSTALLDIR + installed directory of Visual C++ - - VSINSTALLDIR + + + VSINSTALLDIR + installed directory of Visual Studio - - FRAMEWORKDIR + + + FRAMEWORKDIR + installed directory of the .NET framework - - FRAMEWORKVERSIONS + + + FRAMEWORKVERSIONS + list of installed versions of the .NET framework, sorted latest to oldest. @@ -514,7 +621,12 @@ - If a value is not set, it was not available in the registry. + If a value is not set, it was not available in the registry. + Visual Studio 2017 and later do not use the registry for + primary storage of this information, so typically for these + versions only PROJECTSUFFIX and + SOLUTIONSUFFIX will be set. + @@ -534,7 +646,7 @@ The string placed in a generated -Microsoft Visual Studio project file as the value of the +Microsoft Visual C++ project file as the value of the ProjectGUID attribute. There is no default value. If not defined, a new GUID is generated. @@ -545,9 +657,9 @@ The path name placed in a generated -Microsoft Visual Studio project file as the value of the +Microsoft Visual C++ project file as the value of the SccAuxPath attribute if the - MSVS_SCC_PROVIDER construction variable is + MSVS_SCC_PROVIDER &consvar; is also set. There is no default value. @@ -559,7 +671,7 @@ The root path of projects in your SCC workspace, i.e the path under which all project and solution files will be generated. It is used as a reference path from which the - relative paths of the generated Microsoft Visual Studio project + relative paths of the generated Microsoft Visual C++ project and solution files are computed. The relative project file path is placed as the value of the SccLocalPath attribute of the project file and as the values of the @@ -572,7 +684,7 @@ to the number of projects in the solution) attributes of the GlobalSection(SourceCodeControl) section of the Microsoft Visual Studio solution file. This is used only if - the MSVS_SCC_PROVIDER construction variable is + the MSVS_SCC_PROVIDER &consvar; is also set. The default value is the current working directory. @@ -580,9 +692,9 @@ The project name placed in a generated Microsoft - Visual Studio project file as the value of the + Visual C++ project file as the value of the SccProjectName attribute if the - MSVS_SCC_PROVIDER construction variable + MSVS_SCC_PROVIDER &consvar; is also set. In this case the string is also placed in the SccProjectName0 attribute of the GlobalSection(SourceCodeControl) section @@ -594,7 +706,7 @@ The string placed in a generated Microsoft - Visual Studio project file as the value of the + Visual C++ project file as the value of the SccProvider attribute. The string is also placed in the SccProvider0 attribute of the GlobalSection(SourceCodeControl) @@ -604,23 +716,25 @@ - Sets the preferred version of Microsoft Visual Studio to use. + Set the preferred version of Microsoft Visual Studio to use. If &cv-MSVS_VERSION; is not set, &SCons; will (by default) select the latest version of Visual Studio installed on your system. 
So, if you have version 6 and version 7 (MSVS .NET) installed, it will prefer version 7. You can override this by - specifying the MSVS_VERSION variable in the - Environment initialization, setting it to the appropriate + specifying the &cv-link-MSVS_VERSION; variable when + initializing the Environment, setting it to the appropriate version ('6.0' or '7.0', for example). If the specified version isn't installed, tool initialization will fail. - This is obsolete: use &cv-MSVC_VERSION; instead. If - &cv-MSVS_VERSION; is set and &cv-MSVC_VERSION; is - not, &cv-MSVC_VERSION; will be set automatically to - &cv-MSVS_VERSION;. If both are set to different values, - scons will raise an error. + Deprecated since 1.3.0: + &cv-MSVS_VERSION; is deprecated in favor of &cv-link-MSVC_VERSION;. + As a transitional aid, if &cv-MSVS_VERSION; is set + and &cv-MSVC_VERSION; is not, + &cv-MSVC_VERSION; will be initialized to the value + of &cv-MSVS_VERSION;. + An error is raised if If both are set and have different values, @@ -628,8 +742,8 @@ The build command line placed in a generated Microsoft Visual - Studio project file. The default is to have Visual Studio - invoke SCons with any specified build targets. + C++ project file. The default is to have Visual Studio + invoke &SCons; with any specified build targets. @@ -637,33 +751,34 @@ The clean command line placed in a generated Microsoft Visual - Studio project file. The default is to have Visual Studio - invoke SCons with the -c option to remove any specified - targets. + C++ project file. The default is to have Visual Studio + invoke &SCons; with the option to remove + any specified targets. The encoding string placed in a generated Microsoft - Visual Studio project file. The default is encoding + Visual C++ project file. The default is encoding Windows-1252. - The action used to generate Microsoft Visual Studio project files. + The action used to generate Microsoft Visual C++ project files. - The suffix used for Microsoft Visual Studio project (DSP) - files. The default value is .vcproj - when using Visual Studio version 7.x (.NET) or later version, - and .dsp when using earlier versions of - Visual Studio. + The suffix used for Microsoft Visual C++ project (DSP) + files. The default value is + .vcxproj when using Visual Studio 2010 + and later, .vcproj + when using Visual Studio versions between 2002 and 2008, + and .dsp when using Visual Studio 6.0. @@ -671,8 +786,8 @@ The rebuild command line placed in a generated Microsoft - Visual Studio project file. The default is to have Visual - Studio invoke SCons with any specified rebuild targets. + Visual C++ project file. The default is to have Visual + Studio invoke &SCons; with any specified rebuild targets. @@ -680,8 +795,8 @@ - The SCons used in generated Microsoft Visual Studio project - files. The default is the version of SCons being used to + The &SCons; used in generated Microsoft Visual C++ project + files. The default is the version of &SCons; being used to generate the project file. @@ -689,15 +804,15 @@ - The SCons flags used in generated Microsoft Visual Studio project files. + The &SCons; flags used in generated Microsoft Visual C++ project files. - The default SCons command used in generated Microsoft Visual - Studio project files. + The default &SCons; command used in generated Microsoft Visual + C++ project files. 
@@ -705,10 +820,10 @@ The sconscript file (that is, &SConstruct; or &SConscript; - file) that will be invoked by Visual Studio project files + file) that will be invoked by Visual C++ project files (through the &cv-link-MSVSSCONSCOM; variable). The default is the same sconscript file that contains the call to - &b-MSVSProject; to build the project file. + &b-link-MSVSProject; to build the project file. @@ -721,20 +836,19 @@ The suffix used for Microsoft Visual Studio solution (DSW) files. The default value is .sln - when using Visual Studio version 7.x (.NET), and - .dsw when using earlier versions of - Visual Studio. + when using Visual Studio version 7.x (.NET 2002) and later, + and .dsw when using Visual Studio 6.0. - The (optional) path to the SCons library directory, + The (optional) path to the &SCons; library directory, initialized from the external environment. If set, this is used to construct a shorter and more efficient search path in the &cv-link-MSVSSCONS; command line executed from Microsoft - Visual Studio project files. + Visual C++ project files. diff -Nru scons-4.4.0+dfsg/SCons/Tool/ninja/__init__.py scons-4.5.2+dfsg/SCons/Tool/ninja/__init__.py --- scons-4.4.0+dfsg/SCons/Tool/ninja/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/ninja/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -34,6 +34,7 @@ import SCons.Script import SCons.Tool.ninja.Globals from SCons.Script import GetOption +from SCons.Util import sanitize_shell_env from .Globals import NINJA_RULES, NINJA_POOLS, NINJA_CUSTOM_HANDLERS, NINJA_DEFAULT_TARGETS, NINJA_CMDLINE_TARGETS from .Methods import register_custom_handler, register_custom_rule_mapping, register_custom_rule, register_custom_pool, \ @@ -100,11 +101,16 @@ # reproduce the output like a ninja build would def execute_ninja(): + if env['PLATFORM'] == 'win32': + spawn_env = os.environ + else: + spawn_env = sanitize_shell_env(env['ENV']) + proc = subprocess.Popen(cmd, stderr=sys.stderr, stdout=subprocess.PIPE, universal_newlines=True, - env=os.environ if env["PLATFORM"] == "win32" else env['ENV'] + env=spawn_env ) for stdout_line in iter(proc.stdout.readline, ""): yield stdout_line @@ -416,7 +422,7 @@ # The Serial job class is SIGNIFICANTLY (almost twice as) faster # than the Parallel job class for generating Ninja files. So we # monkey the Jobs constructor to only use the Serial Job class. - SCons.Job.Jobs.__init__ = ninja_always_serial + SCons.Taskmaster.Job.Jobs.__init__ = ninja_always_serial ninja_syntax = importlib.import_module(".ninja_syntax", package='ninja') diff -Nru scons-4.4.0+dfsg/SCons/Tool/ninja/Overrides.py scons-4.5.2+dfsg/SCons/Tool/ninja/Overrides.py --- scons-4.4.0+dfsg/SCons/Tool/ninja/Overrides.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/ninja/Overrides.py 2023-03-21 16:17:04.000000000 +0000 @@ -84,7 +84,7 @@ # builds. So here we lie so the Main.py will not give a false # warning to users. 
self.num_jobs = num - self.job = SCons.Job.Serial(taskmaster) + self.job = SCons.Taskmaster.Job.Serial(taskmaster) # pylint: disable=too-few-public-methods diff -Nru scons-4.4.0+dfsg/SCons/Tool/ninja/Utils.py scons-4.5.2+dfsg/SCons/Tool/ninja/Utils.py --- scons-4.4.0+dfsg/SCons/Tool/ninja/Utils.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/ninja/Utils.py 2023-03-21 16:17:04.000000000 +0000 @@ -285,6 +285,7 @@ sorted_dict = ninja_recursive_sorted_dict(build) ninja.build(**sorted_dict) + def get_command_env(env, target, source): """ Return a string that sets the environment for any environment variables that @@ -311,21 +312,8 @@ windows = env["PLATFORM"] == "win32" command_env = "" + scons_specified_env = SCons.Util.sanitize_shell_env(scons_specified_env) for key, value in scons_specified_env.items(): - # Ensure that the ENV values are all strings: - if is_List(value): - # If the value is a list, then we assume it is a - # path list, because that's a pretty common list-like - # value to stick in an environment variable: - value = flatten_sequence(value) - value = joinpath(map(str, value)) - else: - # If it isn't a string or a list, then we just coerce - # it to a string, which is the proper way to handle - # Dir and File instances and will produce something - # reasonable for just about everything else: - value = str(value) - if windows: command_env += "set '{}={}' && ".format(key, value) else: diff -Nru scons-4.4.0+dfsg/SCons/Tool/qt3.py scons-4.5.2+dfsg/SCons/Tool/qt3.py --- scons-4.4.0+dfsg/SCons/Tool/qt3.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/qt3.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,371 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +"""Tool-specific initialization for Qt. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +import os.path +import re + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner +import SCons.Tool +import SCons.Util +import SCons.Tool.cxx +import SCons.Warnings +cplusplus = SCons.Tool.cxx + +class ToolQtWarning(SCons.Warnings.SConsWarning): + pass + +class GeneratedMocFileNotIncluded(ToolQtWarning): + pass + +class QtdirNotFound(ToolQtWarning): + pass + +SCons.Warnings.enableWarningClass(ToolQtWarning) + +header_extensions = [".h", ".hxx", ".hpp", ".hh"] +if SCons.Util.case_sensitive_suffixes('.h', '.H'): + header_extensions.append('.H') + +cxx_suffixes = cplusplus.CXXSuffixes + + +def find_platform_specific_qt3_paths(): + """ + find non-standard QT paths + + If the platform does not put QT tools in standard search paths, + the path is expected to be set using QT3DIR. SCons violates + the normal rule of not pulling from the user's environment + in this case. However, some test cases try to validate what + happens when QT3DIR is unset, so we need to try to make a guess. + + :return: a guess at a path + """ + + # qt3_bin_dirs = [] + qt3_bin_dir = None + if os.path.isfile('/etc/redhat-release'): + with open('/etc/redhat-release','r') as rr: + lines = rr.readlines() + distro = lines[0].split()[0] + if distro == 'CentOS': + # Centos installs QT under /usr/{lib,lib64}/qt{4,5,-3.3}/bin + # so we need to handle this differently + # qt3_bin_dirs = glob.glob('/usr/lib64/qt*/bin') + # TODO: all current Fedoras do the same, need to look deeper here. + qt3_bin_dir = '/usr/lib64/qt-3.3/bin' + + return qt3_bin_dir + + +QT3_BIN_DIR = find_platform_specific_qt3_paths() + +def checkMocIncluded(target, source, env): + moc = target[0] + cpp = source[0] + # looks like cpp.includes is cleared before the build stage :-( + # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ + path = SCons.Defaults.CScan.path(env, moc.cwd) + includes = SCons.Defaults.CScan(cpp, env, path) + if moc not in includes: + SCons.Warnings.warn( + GeneratedMocFileNotIncluded, + "Generated moc file '%s' is not included by '%s'" % + (str(moc), str(cpp))) + +def find_file(filename, paths, node_factory): + for dir in paths: + node = node_factory(filename, dir) + if node.rexists(): + return node + return None + +class _Automoc: + """ + Callable class, which works as an emitter for Programs, SharedLibraries and + StaticLibraries. + """ + + def __init__(self, objBuilderName): + self.objBuilderName = objBuilderName + + def __call__(self, target, source, env): + """ + Smart autoscan function. Gets the list of objects for the Program + or Lib. Adds objects and builders for the special qt3 files. + """ + try: + if int(env.subst('$QT3_AUTOSCAN')) == 0: + return target, source + except ValueError: + pass + try: + debug = int(env.subst('$QT3_DEBUG')) + except ValueError: + debug = 0 + + # some shortcuts used in the scanner + splitext = SCons.Util.splitext + objBuilder = getattr(env, self.objBuilderName) + + # some regular expressions: + # Q_OBJECT detection + q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') + # cxx and c comment 'eater' + #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') + # CW: something must be wrong with the regexp. 
See also bug #998222 + # CURRENTLY THERE IS NO TEST CASE FOR THAT + + # The following is kind of hacky to get builders working properly (FIXME) + objBuilderEnv = objBuilder.env + objBuilder.env = env + mocBuilderEnv = env.Moc.env + env.Moc.env = env + + # make a deep copy for the result; MocH objects will be appended + out_sources = source[:] + + for obj in source: + if not obj.has_builder(): + # binary obj file provided + if debug: + print("scons: qt3: '%s' seems to be a binary. Discarded." % str(obj)) + continue + cpp = obj.sources[0] + if not splitext(str(cpp))[1] in cxx_suffixes: + if debug: + print("scons: qt3: '%s' is no cxx file. Discarded." % str(cpp)) + # c or fortran source + continue + #cpp_contents = comment.sub('', cpp.get_text_contents()) + if debug: + print("scons: qt3: Getting contents of %s" % cpp) + cpp_contents = cpp.get_text_contents() + h=None + for h_ext in header_extensions: + # try to find the header file in the corresponding source + # directory + hname = splitext(cpp.name)[0] + h_ext + h = find_file(hname, (cpp.get_dir(),), env.File) + if h: + if debug: + print("scons: qt3: Scanning '%s' (header of '%s')" % (str(h), str(cpp))) + #h_contents = comment.sub('', h.get_text_contents()) + h_contents = h.get_text_contents() + break + if not h and debug: + print("scons: qt3: no header for '%s'." % (str(cpp))) + if h and q_object_search.search(h_contents): + # h file with the Q_OBJECT macro found -> add moc_cpp + moc_cpp = env.Moc(h) + moc_o = objBuilder(moc_cpp) + out_sources.append(moc_o) + #moc_cpp.target_scanner = SCons.Defaults.CScan + if debug: + print("scons: qt3: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))) + if cpp and q_object_search.search(cpp_contents): + # cpp file with Q_OBJECT macro found -> add moc + # (to be included in cpp) + moc = env.Moc(cpp) + env.Ignore(moc, moc) + if debug: + print("scons: qt3: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))) + #moc.source_scanner = SCons.Defaults.CScan + # restore the original env attributes (FIXME) + objBuilder.env = objBuilderEnv + env.Moc.env = mocBuilderEnv + + return (target, out_sources) + +AutomocShared = _Automoc('SharedObject') +AutomocStatic = _Automoc('StaticObject') + +def _detect_qt3(env): + """Not really safe, but fast method to detect the QT library""" + + QT3DIR = env.get('QT3DIR',None) + if not QT3DIR: + QT3DIR = os.environ.get('QTDIR',None) + if not QT3DIR: + moc = env.WhereIs('moc') or env.WhereIs('moc',QT3_BIN_DIR) + if moc: + QT3DIR = os.path.dirname(os.path.dirname(moc)) + SCons.Warnings.warn( + QtdirNotFound, + "Could not detect qt3, using moc executable as a hint (QT3DIR=%s)" % QT3DIR) + else: + QT3DIR = None + SCons.Warnings.warn( + QtdirNotFound, + "Could not detect qt3, using empty QT3DIR") + return QT3DIR + +def uicEmitter(target, source, env): + adjustixes = SCons.Util.adjustixes + bs = SCons.Util.splitext(str(source[0].name))[0] + bs = os.path.join(str(target[0].get_dir()),bs) + # first target (header) is automatically added by builder + if len(target) < 2: + # second target is implementation + target.append(adjustixes(bs, + env.subst('$QT3_UICIMPLPREFIX'), + env.subst('$QT3_UICIMPLSUFFIX'))) + if len(target) < 3: + # third target is moc file + target.append(adjustixes(bs, + env.subst('$QT3_MOCHPREFIX'), + env.subst('$QT3_MOCHSUFFIX'))) + return target, source + +def uicScannerFunc(node, env, path): + lookout = [] + lookout.extend(env['CPPPATH']) + lookout.append(str(node.rfile().dir)) + includes = re.findall("(.*?)", 
node.get_text_contents()) + result = [] + for incFile in includes: + dep = env.FindFile(incFile,lookout) + if dep: + result.append(dep) + return result + +uicScanner = SCons.Scanner.ScannerBase(uicScannerFunc, + name = "UicScanner", + node_class = SCons.Node.FS.File, + node_factory = SCons.Node.FS.File, + recursive = 0) + +def generate(env): + """Add Builders and construction variables for qt3 to an Environment.""" + CLVar = SCons.Util.CLVar + Action = SCons.Action.Action + Builder = SCons.Builder.Builder + + qt3path = _detect_qt3(env) + if qt3path is None: + return None + + env.SetDefault(QT3DIR = qt3path, + QT3_BINPATH = os.path.join('$QT3DIR', 'bin'), + QT3_CPPPATH = os.path.join('$QT3DIR', 'include'), + QT3_LIBPATH = os.path.join('$QT3DIR', 'lib'), + QT3_MOC = os.path.join('$QT3_BINPATH','moc'), + QT3_UIC = os.path.join('$QT3_BINPATH','uic'), + QT3_LIB = 'qt', # may be set to qt-mt + + QT3_AUTOSCAN = 1, # scan for moc'able sources + + # Some QT specific flags. I don't expect someone wants to + # manipulate those ... + QT3_UICIMPLFLAGS = CLVar(''), + QT3_UICDECLFLAGS = CLVar(''), + QT3_MOCFROMHFLAGS = CLVar(''), + QT3_MOCFROMCXXFLAGS = CLVar('-i'), + + # suffixes/prefixes for the headers / sources to generate + QT3_UICDECLPREFIX = '', + QT3_UICDECLSUFFIX = '.h', + QT3_UICIMPLPREFIX = 'uic_', + QT3_UICIMPLSUFFIX = '$CXXFILESUFFIX', + QT3_MOCHPREFIX = 'moc_', + QT3_MOCHSUFFIX = '$CXXFILESUFFIX', + QT3_MOCCXXPREFIX = '', + QT3_MOCCXXSUFFIX = '.moc', + QT3_UISUFFIX = '.ui', + + # Commands for the qt3 support ... + # command to generate header, implementation and moc-file + # from a .ui file + QT3_UICCOM = [ + CLVar('$QT3_UIC $QT3_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'), + CLVar('$QT3_UIC $QT3_UICIMPLFLAGS -impl ${TARGETS[0].file} ' + '-o ${TARGETS[1]} $SOURCE'), + CLVar('$QT3_MOC $QT3_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')], + # command to generate meta object information for a class + # declarated in a header + QT3_MOCFROMHCOM = ( + '$QT3_MOC $QT3_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'), + # command to generate meta object information for a class + # declarated in a cpp file + QT3_MOCFROMCXXCOM = [ + CLVar('$QT3_MOC $QT3_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'), + Action(checkMocIncluded,None)]) + + # ... and the corresponding builders + uicBld = Builder(action=SCons.Action.Action('$QT3_UICCOM', '$QT3_UICCOMSTR'), + emitter=uicEmitter, + src_suffix='$QT3_UISUFFIX', + suffix='$QT3_UICDECLSUFFIX', + prefix='$QT3_UICDECLPREFIX', + source_scanner=uicScanner) + mocBld = Builder(action={}, prefix={}, suffix={}) + for h in header_extensions: + act = SCons.Action.Action('$QT3_MOCFROMHCOM', '$QT3_MOCFROMHCOMSTR') + mocBld.add_action(h, act) + mocBld.prefix[h] = '$QT3_MOCHPREFIX' + mocBld.suffix[h] = '$QT3_MOCHSUFFIX' + for cxx in cxx_suffixes: + act = SCons.Action.Action('$QT3_MOCFROMCXXCOM', '$QT3_MOCFROMCXXCOMSTR') + mocBld.add_action(cxx, act) + mocBld.prefix[cxx] = '$QT3_MOCCXXPREFIX' + mocBld.suffix[cxx] = '$QT3_MOCCXXSUFFIX' + + # register the builders + env['BUILDERS']['Uic'] = uicBld + env['BUILDERS']['Moc'] = mocBld + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + static_obj.add_src_builder('Uic') + shared_obj.add_src_builder('Uic') + + # We use the emitters of Program / StaticLibrary / SharedLibrary + # to scan for moc'able files + # We can't refer to the builders directly, we have to fetch them + # as Environment attributes because that sets them up to be called + # correctly later by our emitter. 
+ env.AppendUnique(PROGEMITTER =[AutomocStatic], + SHLIBEMITTER=[AutomocShared], + LDMODULEEMITTER=[AutomocShared], + LIBEMITTER =[AutomocStatic], + # Of course, we need to link against the qt3 libraries + CPPPATH=["$QT3_CPPPATH"], + LIBPATH=["$QT3_LIBPATH"], + LIBS=['$QT3_LIB']) + +def exists(env): + return _detect_qt3(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Tool/qt3.xml scons-4.5.2+dfsg/SCons/Tool/qt3.xml --- scons-4.4.0+dfsg/SCons/Tool/qt3.xml 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/qt3.xml 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,527 @@ + + + + +%scons; + +%builders-mod; + +%functions-mod; + +%tools-mod; + +%variables-mod; +]> + + + + + + +Sets &consvars; for building Qt3 applications. + + + +This tool is only suitable for building targeted to Qt3, +which is obsolete +(the tool is deprecated since 4.3, +and was renamed to qt3 in 4.5.0. +). +There are contributed tools for Qt4 and Qt5, see + +https://github.com/SCons/scons-contrib. +Qt4 has also passed end of life for standard support (in Dec 2015). + + + +Note paths for these &consvars; are assembled +using the os.path.join method +so they will have the appropriate separator at runtime, +but are listed here in the various +entries only with the '/' separator +for simplicity. + + + +In addition, the &consvars; +&cv-link-CPPPATH;, +&cv-link-LIBPATH; and +&cv-link-LIBS; may be modified +and the variables +&cv-link-PROGEMITTER;, &cv-link-SHLIBEMITTER; and &cv-link-LIBEMITTER; +are modified. Because the build-performance is affected when using this tool, +you have to explicitly specify it at Environment creation: + + + +Environment(tools=['default','qt3']) + + + +The &t-qt3; tool supports the following operations: + + + +Automatic moc file generation from header files. +You do not have to specify moc files explicitly, the tool does it for you. +However, there are a few preconditions to do so: Your header file must have +the same filebase as your implementation file and must stay in the same +directory. It must have one of the suffixes +.h, +.hpp, +.H, +.hxx, +.hh. +You can turn off automatic moc file generation by setting +&cv-link-QT3_AUTOSCAN; to False. +See also the corresponding +&b-link-Moc; Builder. + + + +Automatic moc file generation from C++ files. +As described in the Qt documentation, include the moc file at the end of +the C++ file. Note that you have to include the file, which is generated +by the transformation +${QT3_MOCCXXPREFIX}<basename>${QT3_MOCCXXSUFFIX}, by default +<basename>.mo. A warning is generated after building the moc file if you +do not include the correct file. If you are using &f-link-VariantDir;, you may +need to specify duplicate=True. +You can turn off automatic moc file generation by setting &cv-QT3_AUTOSCAN; to +False. See also the corresponding +&b-link-Moc; Builder. + + + +Automatic handling of .ui files. +The implementation files generated from .ui +files are handled much the same as yacc or lex files. +Each .ui file given as a source of &b-link-Program;, +&b-link-Library; or &b-link-SharedLibrary; +will generate three files: the declaration file, the +implementation file and a moc file. Because there are also generated headers, +you may need to specify duplicate=True in calls to +&f-link-VariantDir;. +See also the corresponding +&b-link-Uic; Builder. 
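
To tie the qt3 tool description above together, here is an illustrative SConstruct sketch; the file names are placeholders, QT3DIR only needs a hint when detection fails, and builds that still list the old 'qt' tool now get a UserError pointing at 'qt3' (see the replacement qt.py later in this patch).

    # Illustrative sketch only; names and paths are placeholders.
    env = Environment(tools=['default', 'qt3'])
    # env['QT3DIR'] = '/usr/lib64/qt-3.3'   # hint for non-standard installs
    env['QT3_LIB'] = 'qt-mt'                # threaded Qt 3 instead of 'qt'
    # A .ui source expands to header/implementation/moc automatically, and
    # headers containing Q_OBJECT are moc'ed through the emitters.
    env.Program('myapp', ['main.cpp', 'mainwindow.cpp', 'dialog.ui'])
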
+ + + +QT3DIR +QT3_BINPATH +QT3_CPPPATH +QT3_LIBPATH +QT3_MOC +QT3_UIC +QT3_LIB +QT3_AUTOSCAN +QT3_UICIMPLFLAGS +QT3_UICDECLFLAGS +QT3_MOCFROMHFLAGS +QT3_MOCFROMCXXFLAGS +QT3_UICDECLPREFIX +QT3_UICDECLSUFFIX +QT3_UICIMPLPREFIX +QT3_UICIMPLSUFFIX +QT3_MOCHPREFIX +QT3_MOCHSUFFIX +QT3_MOCCXXPREFIX +QT3_MOCCXXSUFFIX +QT3_UISUFFIX +QT3_UICCOM +QT3_MOCFROMHCOM +QT3_MOCFROMCXXCOM + + +QT3DIR + + + + + + +Builds an output file from a moc input file. +moc input files are either header files or C++ files. +This builder is only available after using the +tool &t-link-qt3;. See the &cv-link-QT3DIR; variable for more information. +Example: + + + +env.Moc('foo.h') # generates moc_foo.cc +env.Moc('foo.cpp') # generates foo.moc + + + + + + + +Builds a header file, an implementation file and a moc file from an ui file. +and returns the corresponding nodes in the that order. +This builder is only available after using the tool &t-link-qt3;. +Note: you can specify .ui files directly as source +files to the &b-link-Program;, +&b-link-Library; and &b-link-SharedLibrary; builders +without using this builder. Using this builder lets you override the standard +naming conventions (be careful: prefixes are always prepended to names of +built files; if you don't want prefixes, you may set them to ``). +See the &cv-link-QT3DIR; variable for more information. +Example: + + + +env.Uic('foo.ui') # -> ['foo.h', 'uic_foo.cc', 'moc_foo.cc'] +env.Uic( + target=Split('include/foo.h gen/uicfoo.cc gen/mocfoo.cc'), + source='foo.ui' +) # -> ['include/foo.h', 'gen/uicfoo.cc', 'gen/mocfoo.cc'] + + + + + + + +The path to the Qt installation to build against. +If not already set, +&t-link-qt3; tool tries to obtain this from +os.environ; +if not found there, it tries to make a guess. + + +Changed in 4.5.0: renamed from QTDIR. + + + + + + + +Turn off scanning for mocable files. Use the &b-link-Moc; Builder to explicitly +specify files to run moc on. + + +Changed in 4.5.0: renamed from QT_AUTOSCAN. + + + + + + + +The path where the Qt binaries are installed. +The default value is '&cv-link-QT3DIR;/bin'. + + +Changed in 4.5.0: renamed from QT_BINPATH. + + + + + + + +The path where the Qt header files are installed. +The default value is '&cv-link-QT3DIR;/include'. +Note: If you set this variable to None, +the tool won't change the &cv-link-CPPPATH; +construction variable. + + +Changed in 4.5.0: renamed from QT_CPPPATH. + + + + + + + +Prints lots of debugging information while scanning for moc files. + + +Changed in 4.5.0: renamed from QT_DEBUG. + + + + + + + +Default value is 'qt'. +You may want to set this to 'qt-mt'. +Note: If you set this variable to None, +the tool won't change the &cv-link-LIBS; variable. + + +Changed in 4.5.0: renamed from QT_LIB. + + + + + + + +The path where the Qt libraries are installed. +The default value is '&cv-link-QT3DIR;/lib'. +Note: If you set this variable to None, +the tool won't change the &cv-link-LIBPATH; +construction variable. + + +Changed in 4.5.0: renamed from QT_LIBPATH. + + + + + + + +Default value is '&cv-link-QT3_BINPATH;/moc'. + + + + + + + +Default value is ''. +Prefix for moc output files when source is a C++ file. + + + + + + + +Default value is '.moc'. +Suffix for moc output files when source is a C++ file. + + +Changed in 4.5.0: renamed from QT_MOCCXXSUFFIX. + + + + + + + +Default value is '-i'. +These flags are passed to moc when moccing a C++ file. + + +Changed in 4.5.0: renamed from QT_MOCFROMCXXFLAGS. + + + + + + + +Command to generate a moc file from a C++ file. 
+ + +Changed in 4.5.0: renamed from QT_MOCFROMCXXCOM. + + + + + + + +The string displayed when generating a moc file from a C++ file. +If this is not set, then &cv-link-QT3_MOCFROMCXXCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_MOCFROMCXXCOMSTR. + + + + + + + +Command to generate a moc file from a header. + + +Changed in 4.5.0: renamed from QT_MOCFROMSHCOM. + + + + + + + +The string displayed when generating a moc file from a C++ file. +If this is not set, then &cv-link-QT3_MOCFROMHCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_MOCFROMSHCOMSTR. + + + + + + + +Default value is ''. These flags are passed to moc +when moccing a header file. + + +Changed in 4.5.0: renamed from QT_MOCFROMSHFLAGS. + + + + + + + +Default value is 'moc_'. +Prefix for moc output files when source is a header. + + +Changed in 4.5.0: renamed from QT_MOCHPREFIX. + + + + + + + +Default value is '&cv-link-CXXFILESUFFIX;'. +Suffix for moc output files when source is a header. + + +Changed in 4.5.0: renamed from QT_MOCHSUFFIX. + + + + + + + +Default value is '&cv-link-QT3_BINPATH;/uic'. + + +Changed in 4.5.0: renamed from QT_UIC. + + + + + + + +Command to generate header files from .ui files. + + +Changed in 4.5.0: renamed from QT_UICCOM. + + + + + + + +The string displayed when generating header files from .ui files. +If this is not set, then &cv-link-QT3_UICCOM; (the command line) is displayed. + + +Changed in 4.5.0: renamed from QT_UICCOMSTR. + + + + + + + +Default value is ''. These flags are passed to uic +when creating a header file from a .ui file. + + +Changed in 4.5.0: renamed from QT_UICDECLFLAGS. + + + + + + + +Default value is ''. +Prefix for uic generated header files. + + +Changed in 4.5.0: renamed from QT_UICDECLPREFIX. + + + + + + + +Default value is '.h'. +Suffix for uic generated header files. + + +Changed in 4.5.0: renamed from QT_UICDECLSUFFIX. + + + + + + + +Default value is ''. +These flags are passed to uic when creating a C++ +file from a .ui file. + + +Changed in 4.5.0: renamed from QT_UICIMPFLAGS. + + + + + + + +Default value is 'uic_'. +Prefix for uic generated implementation files. + + +Changed in 4.5.0: renamed from QT_UICIMPLPREFIX. + + + + + + + +Default value is '&cv-link-CXXFILESUFFIX;'. Suffix for uic generated implementation +files. + + +Changed in 4.5.0: renamed from QT_UICIMPLSUFFIX. + + + + + + + +Default value is '.ui'. +Suffix of designer input files. + + +Changed in 4.5.0: renamed from QT_UISUFFIX. + + + + + diff -Nru scons-4.4.0+dfsg/SCons/Tool/qt.py scons-4.5.2+dfsg/SCons/Tool/qt.py --- scons-4.4.0+dfsg/SCons/Tool/qt.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/qt.py 2023-03-21 16:17:04.000000000 +0000 @@ -21,351 +21,18 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -"""Tool-specific initialization for Qt. - -There normally shouldn't be any need to import this module directly. -It will usually be imported through the generic SCons.Tool.Tool() -selection method. 
""" - -import os.path -import re - -import SCons.Action -import SCons.Builder -import SCons.Defaults -import SCons.Scanner -import SCons.Tool -import SCons.Util -import SCons.Tool.cxx -import SCons.Warnings -cplusplus = SCons.Tool.cxx - -class ToolQtWarning(SCons.Warnings.SConsWarning): - pass - -class GeneratedMocFileNotIncluded(ToolQtWarning): - pass - -class QtdirNotFound(ToolQtWarning): - pass - -SCons.Warnings.enableWarningClass(ToolQtWarning) - -header_extensions = [".h", ".hxx", ".hpp", ".hh"] -if SCons.Util.case_sensitive_suffixes('.h', '.H'): - header_extensions.append('.H') - -cxx_suffixes = cplusplus.CXXSuffixes - - -def find_platform_specific_qt_paths(): - """ - find non-standard QT paths - - If the platform does not put QT tools in standard search paths, - the path is expected to be set using QTDIR. SCons violates - the normal rule of not pulling from the user's environment - in this case. However, some test cases try to validate what - happens when QTDIR is unset, so we need to try to make a guess. - - :return: a guess at a path - """ - - # qt_bin_dirs = [] - qt_bin_dir = None - if os.path.isfile('/etc/redhat-release'): - with open('/etc/redhat-release','r') as rr: - lines = rr.readlines() - distro = lines[0].split()[0] - if distro == 'CentOS': - # Centos installs QT under /usr/{lib,lib64}/qt{4,5,-3.3}/bin - # so we need to handle this differently - # qt_bin_dirs = glob.glob('/usr/lib64/qt*/bin') - # TODO: all current Fedoras do the same, need to look deeper here. - qt_bin_dir = '/usr/lib64/qt-3.3/bin' - - return qt_bin_dir - - -QT_BIN_DIR = find_platform_specific_qt_paths() - -def checkMocIncluded(target, source, env): - moc = target[0] - cpp = source[0] - # looks like cpp.includes is cleared before the build stage :-( - # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ - path = SCons.Defaults.CScan.path(env, moc.cwd) - includes = SCons.Defaults.CScan(cpp, env, path) - if moc not in includes: - SCons.Warnings.warn( - GeneratedMocFileNotIncluded, - "Generated moc file '%s' is not included by '%s'" % - (str(moc), str(cpp))) - -def find_file(filename, paths, node_factory): - for dir in paths: - node = node_factory(filename, dir) - if node.rexists(): - return node - return None - -class _Automoc: - """ - Callable class, which works as an emitter for Programs, SharedLibraries and - StaticLibraries. - """ - - def __init__(self, objBuilderName): - self.objBuilderName = objBuilderName - - def __call__(self, target, source, env): - """ - Smart autoscan function. Gets the list of objects for the Program - or Lib. Adds objects and builders for the special qt files. - """ - try: - if int(env.subst('$QT_AUTOSCAN')) == 0: - return target, source - except ValueError: - pass - try: - debug = int(env.subst('$QT_DEBUG')) - except ValueError: - debug = 0 - - # some shortcuts used in the scanner - splitext = SCons.Util.splitext - objBuilder = getattr(env, self.objBuilderName) - - # some regular expressions: - # Q_OBJECT detection - q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') - # cxx and c comment 'eater' - #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') - # CW: something must be wrong with the regexp. 
See also bug #998222 - # CURRENTLY THERE IS NO TEST CASE FOR THAT - - # The following is kind of hacky to get builders working properly (FIXME) - objBuilderEnv = objBuilder.env - objBuilder.env = env - mocBuilderEnv = env.Moc.env - env.Moc.env = env - - # make a deep copy for the result; MocH objects will be appended - out_sources = source[:] - - for obj in source: - if not obj.has_builder(): - # binary obj file provided - if debug: - print("scons: qt: '%s' seems to be a binary. Discarded." % str(obj)) - continue - cpp = obj.sources[0] - if not splitext(str(cpp))[1] in cxx_suffixes: - if debug: - print("scons: qt: '%s' is no cxx file. Discarded." % str(cpp)) - # c or fortran source - continue - #cpp_contents = comment.sub('', cpp.get_text_contents()) - if debug: - print("scons: qt: Getting contents of %s" % cpp) - cpp_contents = cpp.get_text_contents() - h=None - for h_ext in header_extensions: - # try to find the header file in the corresponding source - # directory - hname = splitext(cpp.name)[0] + h_ext - h = find_file(hname, (cpp.get_dir(),), env.File) - if h: - if debug: - print("scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))) - #h_contents = comment.sub('', h.get_text_contents()) - h_contents = h.get_text_contents() - break - if not h and debug: - print("scons: qt: no header for '%s'." % (str(cpp))) - if h and q_object_search.search(h_contents): - # h file with the Q_OBJECT macro found -> add moc_cpp - moc_cpp = env.Moc(h) - moc_o = objBuilder(moc_cpp) - out_sources.append(moc_o) - #moc_cpp.target_scanner = SCons.Defaults.CScan - if debug: - print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))) - if cpp and q_object_search.search(cpp_contents): - # cpp file with Q_OBJECT macro found -> add moc - # (to be included in cpp) - moc = env.Moc(cpp) - env.Ignore(moc, moc) - if debug: - print("scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))) - #moc.source_scanner = SCons.Defaults.CScan - # restore the original env attributes (FIXME) - objBuilder.env = objBuilderEnv - env.Moc.env = mocBuilderEnv - - return (target, out_sources) - -AutomocShared = _Automoc('SharedObject') -AutomocStatic = _Automoc('StaticObject') - -def _detect(env): - """Not really safe, but fast method to detect the QT library""" - - QTDIR = env.get('QTDIR',None) - if not QTDIR: - QTDIR = os.environ.get('QTDIR',None) - if not QTDIR: - moc = env.WhereIs('moc') or env.WhereIs('moc',QT_BIN_DIR) - if moc: - QTDIR = os.path.dirname(os.path.dirname(moc)) - SCons.Warnings.warn( - QtdirNotFound, - "Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR) - else: - QTDIR = None - SCons.Warnings.warn( - QtdirNotFound, - "Could not detect qt, using empty QTDIR") - return QTDIR - -def uicEmitter(target, source, env): - adjustixes = SCons.Util.adjustixes - bs = SCons.Util.splitext(str(source[0].name))[0] - bs = os.path.join(str(target[0].get_dir()),bs) - # first target (header) is automatically added by builder - if len(target) < 2: - # second target is implementation - target.append(adjustixes(bs, - env.subst('$QT_UICIMPLPREFIX'), - env.subst('$QT_UICIMPLSUFFIX'))) - if len(target) < 3: - # third target is moc file - target.append(adjustixes(bs, - env.subst('$QT_MOCHPREFIX'), - env.subst('$QT_MOCHSUFFIX'))) - return target, source - -def uicScannerFunc(node, env, path): - lookout = [] - lookout.extend(env['CPPPATH']) - lookout.append(str(node.rfile().dir)) - includes = re.findall("(.*?)", node.get_text_contents()) - result = [] - for 
incFile in includes: - dep = env.FindFile(incFile,lookout) - if dep: - result.append(dep) - return result - -uicScanner = SCons.Scanner.ScannerBase(uicScannerFunc, - name = "UicScanner", - node_class = SCons.Node.FS.File, - node_factory = SCons.Node.FS.File, - recursive = 0) +This is a fake tool to instruct any builds still referencing 'qt' instead +of the new 'qt3' or a newer QT builder how to fix their now broken build. +""" +import SCons.Errors def generate(env): - """Add Builders and construction variables for qt to an Environment.""" - CLVar = SCons.Util.CLVar - Action = SCons.Action.Action - Builder = SCons.Builder.Builder - - SCons.Warnings.warn( - SCons.Warnings.ToolQtDeprecatedWarning, "Tool module for Qt version 3 is deprecated" + raise SCons.Errors.UserError( + "Deprecated tool 'qt' renamed to 'qt3'. " + "Please update your build accordingly. " + "'qt3' will be removed entirely in a future release." ) - env.SetDefault(QTDIR = _detect(env), - QT_BINPATH = os.path.join('$QTDIR', 'bin'), - QT_CPPPATH = os.path.join('$QTDIR', 'include'), - QT_LIBPATH = os.path.join('$QTDIR', 'lib'), - QT_MOC = os.path.join('$QT_BINPATH','moc'), - QT_UIC = os.path.join('$QT_BINPATH','uic'), - QT_LIB = 'qt', # may be set to qt-mt - - QT_AUTOSCAN = 1, # scan for moc'able sources - - # Some QT specific flags. I don't expect someone wants to - # manipulate those ... - QT_UICIMPLFLAGS = CLVar(''), - QT_UICDECLFLAGS = CLVar(''), - QT_MOCFROMHFLAGS = CLVar(''), - QT_MOCFROMCXXFLAGS = CLVar('-i'), - - # suffixes/prefixes for the headers / sources to generate - QT_UICDECLPREFIX = '', - QT_UICDECLSUFFIX = '.h', - QT_UICIMPLPREFIX = 'uic_', - QT_UICIMPLSUFFIX = '$CXXFILESUFFIX', - QT_MOCHPREFIX = 'moc_', - QT_MOCHSUFFIX = '$CXXFILESUFFIX', - QT_MOCCXXPREFIX = '', - QT_MOCCXXSUFFIX = '.moc', - QT_UISUFFIX = '.ui', - - # Commands for the qt support ... - # command to generate header, implementation and moc-file - # from a .ui file - QT_UICCOM = [ - CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'), - CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} ' - '-o ${TARGETS[1]} $SOURCE'), - CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')], - # command to generate meta object information for a class - # declarated in a header - QT_MOCFROMHCOM = ( - '$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'), - # command to generate meta object information for a class - # declarated in a cpp file - QT_MOCFROMCXXCOM = [ - CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'), - Action(checkMocIncluded,None)]) - - # ... 
and the corresponding builders - uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'), - emitter=uicEmitter, - src_suffix='$QT_UISUFFIX', - suffix='$QT_UICDECLSUFFIX', - prefix='$QT_UICDECLPREFIX', - source_scanner=uicScanner) - mocBld = Builder(action={}, prefix={}, suffix={}) - for h in header_extensions: - act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR') - mocBld.add_action(h, act) - mocBld.prefix[h] = '$QT_MOCHPREFIX' - mocBld.suffix[h] = '$QT_MOCHSUFFIX' - for cxx in cxx_suffixes: - act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR') - mocBld.add_action(cxx, act) - mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX' - mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX' - - # register the builders - env['BUILDERS']['Uic'] = uicBld - env['BUILDERS']['Moc'] = mocBld - static_obj, shared_obj = SCons.Tool.createObjBuilders(env) - static_obj.add_src_builder('Uic') - shared_obj.add_src_builder('Uic') - - # We use the emitters of Program / StaticLibrary / SharedLibrary - # to scan for moc'able files - # We can't refer to the builders directly, we have to fetch them - # as Environment attributes because that sets them up to be called - # correctly later by our emitter. - env.AppendUnique(PROGEMITTER =[AutomocStatic], - SHLIBEMITTER=[AutomocShared], - LDMODULEEMITTER=[AutomocShared], - LIBEMITTER =[AutomocStatic], - # Of course, we need to link against the qt libraries - CPPPATH=["$QT_CPPPATH"], - LIBPATH=["$QT_LIBPATH"], - LIBS=['$QT_LIB']) - def exists(env): - return _detect(env) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: + return False diff -Nru scons-4.4.0+dfsg/SCons/Tool/qt.xml scons-4.5.2+dfsg/SCons/Tool/qt.xml --- scons-4.4.0+dfsg/SCons/Tool/qt.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/qt.xml 2023-03-21 16:17:04.000000000 +0000 @@ -26,422 +26,9 @@ -Sets &consvars; for building Qt3 applications. - - - -This tool is only suitable for building targeted to Qt3, -which is obsolete -(the tool is deprecated since 4.3). -There are contributed tools for Qt4 and Qt5, see - -https://github.com/SCons/scons-contrib. -Qt4 has also passed end of life for standard support (in Dec 2015). - - - -Note paths for these &consvars; are assembled -using the os.path.join method -so they will have the appropriate separator at runtime, -but are listed here in the various -entries only with the '/' separator -for simplicity. - - - -In addition, the &consvars; -&cv-link-CPPPATH;, -&cv-link-LIBPATH; and -&cv-link-LIBS; may be modified -and the variables -&cv-link-PROGEMITTER;, &cv-link-SHLIBEMITTER; and &cv-link-LIBEMITTER; -are modified. Because the build-performance is affected when using this tool, -you have to explicitly specify it at Environment creation: - - - -Environment(tools=['default','qt']) - - - -The &t-qt; tool supports the following operations: - - - -Automatic moc file generation from header files. -You do not have to specify moc files explicitly, the tool does it for you. -However, there are a few preconditions to do so: Your header file must have -the same filebase as your implementation file and must stay in the same -directory. It must have one of the suffixes -.h, -.hpp, -.H, -.hxx, -.hh. -You can turn off automatic moc file generation by setting -&cv-link-QT_AUTOSCAN; to False. -See also the corresponding -&b-link-Moc; Builder. - - - -Automatic moc file generation from C++ files. 
-As described in the Qt documentation, include the moc file at the end of -the C++ file. Note that you have to include the file, which is generated -by the transformation -${QT_MOCCXXPREFIX}<basename>${QT_MOCCXXSUFFIX}, by default -<basename>.mo. A warning is generated after building the moc file if you -do not include the correct file. If you are using &f-link-VariantDir;, you may -need to specify duplicate=True. -You can turn off automatic moc file generation by setting &cv-QT_AUTOSCAN; to -False. See also the corresponding -&b-link-Moc; Builder. - - - -Automatic handling of .ui files. -The implementation files generated from .ui -files are handled much the same as yacc or lex files. -Each .ui file given as a source of &b-link-Program;, -&b-link-Library; or &b-link-SharedLibrary; -will generate three files: the declaration file, the -implementation file and a moc file. Because there are also generated headers, -you may need to specify duplicate=True in calls to -&f-link-VariantDir;. -See also the corresponding -&b-link-Uic; Builder. +Placeholder tool to alert anyone still using qt tools to switch to qt3 or newer tool. - -QTDIR -QT_BINPATH -QT_CPPPATH -QT_LIBPATH -QT_MOC -QT_UIC -QT_LIB -QT_AUTOSCAN -QT_UICIMPLFLAGS -QT_UICDECLFLAGS -QT_MOCFROMHFLAGS -QT_MOCFROMCXXFLAGS -QT_UICDECLPREFIX -QT_UICDECLSUFFIX -QT_UICIMPLPREFIX -QT_UICIMPLSUFFIX -QT_MOCHPREFIX -QT_MOCHSUFFIX -QT_MOCCXXPREFIX -QT_MOCCXXSUFFIX -QT_UISUFFIX -QT_UICCOM -QT_MOCFROMHCOM -QT_MOCFROMCXXCOM - - -QTDIR - - - - -Builds an output file from a moc input file. -moc input files are either header files or C++ files. -This builder is only available after using the -tool &t-link-qt;. See the &cv-link-QTDIR; variable for more information. -Example: - - - -env.Moc('foo.h') # generates moc_foo.cc -env.Moc('foo.cpp') # generates foo.moc - - - - - - - -Builds a header file, an implementation file and a moc file from an ui file. -and returns the corresponding nodes in the that order. -This builder is only available after using the tool &t-link-qt;. -Note: you can specify .ui files directly as source -files to the &b-link-Program;, -&b-link-Library; and &b-link-SharedLibrary; builders -without using this builder. Using this builder lets you override the standard -naming conventions (be careful: prefixes are always prepended to names of -built files; if you don't want prefixes, you may set them to ``). -See the &cv-link-QTDIR; variable for more information. -Example: - - - -env.Uic('foo.ui') # -> ['foo.h', 'uic_foo.cc', 'moc_foo.cc'] -env.Uic( - target=Split('include/foo.h gen/uicfoo.cc gen/mocfoo.cc'), - source='foo.ui' -) # -> ['include/foo.h', 'gen/uicfoo.cc', 'gen/mocfoo.cc'] - - - - - - - -The path to the Qt installation to build against. -If not already set, -&t-link-qt; tool tries to obtain this from -os.environ; -if not found there, it tries to make a guess. - - - - - - - -Turn off scanning for mocable files. Use the &b-link-Moc; Builder to explicitly -specify files to run moc on. - - - - - - - -The path where the Qt binaries are installed. -The default value is '&cv-link-QTDIR;/bin'. - - - - - - - -The path where the Qt header files are installed. -The default value is '&cv-link-QTDIR;/include'. -Note: If you set this variable to None, -the tool won't change the &cv-link-CPPPATH; -construction variable. - - - - - - - -Prints lots of debugging information while scanning for moc files. - - - - - - - -Default value is 'qt'. -You may want to set this to 'qt-mt'. 
-Note: If you set this variable to None, -the tool won't change the &cv-link-LIBS; variable. - - - - - - - -The path where the Qt libraries are installed. -The default value is '&cv-link-QTDIR;/lib'. -Note: If you set this variable to None, -the tool won't change the &cv-link-LIBPATH; -construction variable. - - - - - - - -Default value is '&cv-link-QT_BINPATH;/moc'. - - - - - - - -Default value is ''. -Prefix for moc output files when source is a C++ file. - - - - - - - -Default value is '.moc'. -Suffix for moc output files when source is a C++ file. - - - - - - - -Default value is '-i'. -These flags are passed to moc when moccing a C++ file. - - - - - - - -Command to generate a moc file from a C++ file. - - - - - - - -The string displayed when generating a moc file from a C++ file. -If this is not set, then &cv-link-QT_MOCFROMCXXCOM; (the command line) is displayed. - - - - - - - -Command to generate a moc file from a header. - - - - - - - -The string displayed when generating a moc file from a C++ file. -If this is not set, then &cv-link-QT_MOCFROMHCOM; (the command line) is displayed. - - - - - - - -Default value is ''. These flags are passed to moc -when moccing a header file. - - - - - - - -Default value is 'moc_'. -Prefix for moc output files when source is a header. - - - - - - - -Default value is '&cv-link-CXXFILESUFFIX;'. -Suffix for moc output files when source is a header. - - - - - - - -Default value is '&cv-link-QT_BINPATH;/uic'. - - - - - - - -Command to generate header files from .ui files. - - - - - - - -The string displayed when generating header files from .ui files. -If this is not set, then &cv-link-QT_UICCOM; (the command line) is displayed. - - - - - - - -Default value is ''. These flags are passed to uic -when creating a header file from a .ui file. - - - - - - - -Default value is ''. -Prefix for uic generated header files. - - - - - - - -Default value is '.h'. -Suffix for uic generated header files. - - - - - - - -Default value is ''. -These flags are passed to uic when creating a C++ -file from a .ui file. - - - - - - - -Default value is 'uic_'. -Prefix for uic generated implementation files. - - - - - - - -Default value is '&cv-link-CXXFILESUFFIX;'. Suffix for uic generated implementation -files. - - - - - - - -Default value is '.ui'. -Suffix of designer input files. - - - - diff -Nru scons-4.4.0+dfsg/SCons/Tool/rmic.xml scons-4.5.2+dfsg/SCons/Tool/rmic.xml --- scons-4.4.0+dfsg/SCons/Tool/rmic.xml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Tool/rmic.xml 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ SCons.Warnings +# --> SCons.Errors +# If you run into places that have to do local imports for this reason, +# annotate them for pylint and for human readers to know why: +# pylint: disable=import-outside-toplevel +# Be aware that Black can break this if the annotated line is too +# long and it wants to split: +# from SCons.Errors import ( +# SConsEnvironmentError, +# ) # pylint: disable=import-outside-toplevel +# That's syntactically valid, but pylint won't recorgnize it with the +# annotation at the end, it would have to be on the first line +# (issues filed upstream, for now just be aware) + +PYPY = hasattr(sys, 'pypy_translation_info') + +# this string will be hashed if a Node refers to a file that doesn't exist +# in order to distinguish from a file that exists but is empty. +NOFILE = "SCONS_MAGIC_MISSING_FILE_STRING" + +# unused? 
+def dictify(keys, values, result=None) -> dict: + if result is None: + result = {} + result.update(zip(keys, values)) + return result + +_ALTSEP = os.altsep +if _ALTSEP is None and sys.platform == 'win32': + # My ActivePython 2.0.1 doesn't set os.altsep! What gives? + _ALTSEP = '/' +if _ALTSEP: + def rightmost_separator(path, sep): + return max(path.rfind(sep), path.rfind(_ALTSEP)) +else: + def rightmost_separator(path, sep): + return path.rfind(sep) + +# First two from the Python Cookbook, just for completeness. +# (Yeah, yeah, YAGNI...) +def containsAny(s, pat) -> bool: + """Check whether string `s` contains ANY of the items in `pat`.""" + return any(c in s for c in pat) + +def containsAll(s, pat) -> bool: + """Check whether string `s` contains ALL of the items in `pat`.""" + return all(c in s for c in pat) + +def containsOnly(s, pat) -> bool: + """Check whether string `s` contains ONLY items in `pat`.""" + for c in s: + if c not in pat: + return False + return True + + +# TODO: Verify this method is STILL faster than os.path.splitext +def splitext(path) -> tuple: + """Split `path` into a (root, ext) pair. + + Same as :mod:`os.path.splitext` but faster. + """ + sep = rightmost_separator(path, os.sep) + dot = path.rfind('.') + # An ext is only real if it has at least one non-digit char + if dot > sep and not path[dot + 1:].isdigit(): + return path[:dot], path[dot:] + + return path, "" + +def updrive(path) -> str: + """Make the drive letter (if any) upper case. + + This is useful because Windows is inconsistent on the case + of the drive letter, which can cause inconsistencies when + calculating command signatures. + """ + drive, rest = os.path.splitdrive(path) + if drive: + path = drive.upper() + rest + return path + +class NodeList(UserList): + """A list of Nodes with special attribute retrieval. + + Unlike an ordinary list, access to a member's attribute returns a + `NodeList` containing the same attribute for each member. Although + this can hold any object, it is intended for use when processing + Nodes, where fetching an attribute of each member is very commone, + for example getting the content signature of each node. The term + "attribute" here includes the string representation. + + >>> someList = NodeList([' foo ', ' bar ']) + >>> someList.strip() + ['foo', 'bar'] + """ + + def __bool__(self): + return bool(self.data) + + def __str__(self): + return ' '.join(map(str, self.data)) + + def __iter__(self): + return iter(self.data) + + def __call__(self, *args, **kwargs) -> 'NodeList': + result = [x(*args, **kwargs) for x in self.data] + return self.__class__(result) + + def __getattr__(self, name) -> 'NodeList': + """Returns a NodeList of `name` from each member.""" + result = [getattr(x, name) for x in self.data] + return self.__class__(result) + + def __getitem__(self, index): + """Returns one item, forces a `NodeList` if `index` is a slice.""" + # TODO: annotate return how? Union[] - don't know type of single item + if isinstance(index, slice): + return self.__class__(self.data[index]) + return self.data[index] + + +class DisplayEngine: + """A callable class used to display SCons messages.""" + + print_it = True + + def __call__(self, text, append_newline=1): + if not self.print_it: + return + + if append_newline: + text = text + '\n' + + # Stdout might be connected to a pipe that has been closed + # by now. The most likely reason for the pipe being closed + # is that the user has press ctrl-c. It this is the case, + # then SCons is currently shutdown. 
We therefore ignore + # IOError's here so that SCons can continue and shutdown + # properly so that the .sconsign is correctly written + # before SCons exits. + with suppress(IOError): + sys.stdout.write(str(text)) + + def set_mode(self, mode): + self.print_it = mode + +display = DisplayEngine() + + +# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value) +def render_tree(root, child_func, prune=0, margin=[0], visited=None) -> str: + """Render a tree of nodes into an ASCII tree view. + + Args: + root: the root node of the tree + child_func: the function called to get the children of a node + prune: don't visit the same node twice + margin: the format of the left margin to use for children of `root`. + 1 results in a pipe, and 0 results in no pipe. + visited: a dictionary of visited nodes in the current branch if + `prune` is 0, or in the whole tree if `prune` is 1. + """ + + rname = str(root) + + # Initialize 'visited' dict, if required + if visited is None: + visited = {} + + children = child_func(root) + retval = "" + for pipe in margin[:-1]: + if pipe: + retval = retval + "| " + else: + retval = retval + " " + + if rname in visited: + return retval + "+-[" + rname + "]\n" + + retval = retval + "+-" + rname + "\n" + if not prune: + visited = copy.copy(visited) + visited[rname] = True + + for i, child in enumerate(children): + margin.append(i < len(children)-1) + retval = retval + render_tree(child, child_func, prune, margin, visited) + margin.pop() + + return retval + +def IDX(n) -> bool: + """Generate in index into strings from the tree legends. + + These are always a choice between two, so bool works fine. + """ + return bool(n) + +# unicode line drawing chars: +BOX_HORIZ = chr(0x2500) # '─' +BOX_VERT = chr(0x2502) # '│' +BOX_UP_RIGHT = chr(0x2514) # '└' +BOX_DOWN_RIGHT = chr(0x250c) # '┌' +BOX_DOWN_LEFT = chr(0x2510) # '┐' +BOX_UP_LEFT = chr(0x2518) # '┘' +BOX_VERT_RIGHT = chr(0x251c) # '├' +BOX_HORIZ_DOWN = chr(0x252c) # '┬' + + +# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value) +def print_tree( + root, + child_func, + prune=0, + showtags=False, + margin=[0], + visited=None, + lastChild: bool = False, + singleLineDraw: bool = False, +) -> None: + """Print a tree of nodes. + + This is like func:`render_tree`, except it prints lines directly instead + of creating a string representation in memory, so that huge trees can + be handled. + + Args: + root: the root node of the tree + child_func: the function called to get the children of a node + prune: don't visit the same node twice + showtags: print status information to the left of each node line + margin: the format of the left margin to use for children of *root*. + 1 results in a pipe, and 0 results in no pipe. + visited: a dictionary of visited nodes in the current branch if + *prune* is 0, or in the whole tree if *prune* is 1. + lastChild: this is the last leaf of a branch + singleLineDraw: use line-drawing characters rather than ASCII. 
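
As a usage sketch (illustrative, not from the patch): render_tree() and print_tree() only need something with a string form plus a child_func callable, so a plain dict of names is enough to drive them.

    from SCons.Util import render_tree, print_tree

    tree = {'top': ['lib', 'app'], 'lib': ['util.c'], 'app': ['main.c'],
            'util.c': [], 'main.c': []}
    children = lambda node: tree.get(str(node), [])

    print(render_tree('top', children), end='')   # build the text, then print
    print_tree('top', children)                   # or write lines directly
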
+ """ + + rname = str(root) + + # Initialize 'visited' dict, if required + if visited is None: + visited = {} + + if showtags: + + if showtags == 2: + legend = (' E = exists\n' + + ' R = exists in repository only\n' + + ' b = implicit builder\n' + + ' B = explicit builder\n' + + ' S = side effect\n' + + ' P = precious\n' + + ' A = always build\n' + + ' C = current\n' + + ' N = no clean\n' + + ' H = no cache\n' + + '\n') + sys.stdout.write(legend) + + tags = [ + '[', + ' E'[IDX(root.exists())], + ' R'[IDX(root.rexists() and not root.exists())], + ' BbB'[ + [0, 1][IDX(root.has_explicit_builder())] + + [0, 2][IDX(root.has_builder())] + ], + ' S'[IDX(root.side_effect)], + ' P'[IDX(root.precious)], + ' A'[IDX(root.always_build)], + ' C'[IDX(root.is_up_to_date())], + ' N'[IDX(root.noclean)], + ' H'[IDX(root.nocache)], + ']' + ] + + else: + tags = [] + + def MMM(m): + if singleLineDraw: + return [" ", BOX_VERT + " "][m] + + return [" ", "| "][m] + + margins = list(map(MMM, margin[:-1])) + children = child_func(root) + cross = "+-" + if singleLineDraw: + cross = BOX_VERT_RIGHT + BOX_HORIZ # sign used to point to the leaf. + # check if this is the last leaf of the branch + if lastChild: + # if this if the last leaf, then terminate: + cross = BOX_UP_RIGHT + BOX_HORIZ # sign for the last leaf + + # if this branch has children then split it + if children: + # if it's a leaf: + if prune and rname in visited and children: + cross += BOX_HORIZ + else: + cross += BOX_HORIZ_DOWN + + if prune and rname in visited and children: + sys.stdout.write(''.join(tags + margins + [cross, '[', rname, ']']) + '\n') + return + + sys.stdout.write(''.join(tags + margins + [cross, rname]) + '\n') + + visited[rname] = 1 + + # if this item has children: + if children: + margin.append(1) # Initialize margin with 1 for vertical bar. + idx = IDX(showtags) + _child = 0 # Initialize this for the first child. + for C in children[:-1]: + _child = _child + 1 # number the children + print_tree( + C, + child_func, + prune, + idx, + margin, + visited, + (len(children) - _child) <= 0, + singleLineDraw, + ) + # margins are with space (index 0) because we arrived to the last child. + margin[-1] = 0 + # for this call child and nr of children needs to be set 0, to signal the second phase. + print_tree(children[-1], child_func, prune, idx, margin, visited, True, singleLineDraw) + margin.pop() # destroy the last margin added + + +def do_flatten( + sequence, + result, + isinstance=isinstance, + StringTypes=StringTypes, + SequenceTypes=SequenceTypes, +): # pylint: disable=redefined-outer-name,redefined-builtin + for item in sequence: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + + +def flatten( # pylint: disable=redefined-outer-name,redefined-builtin + obj, + isinstance=isinstance, + StringTypes=StringTypes, + SequenceTypes=SequenceTypes, + do_flatten=do_flatten, +) -> list: + """Flatten a sequence to a non-nested list. + + Converts either a single scalar or a nested sequence to a non-nested list. + Note that :func:`flatten` considers strings + to be scalars instead of sequences like pure Python would. 
+ """ + if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes): + return [obj] + result = [] + for item in obj: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + return result + + +def flatten_sequence( # pylint: disable=redefined-outer-name,redefined-builtin + sequence, + isinstance=isinstance, + StringTypes=StringTypes, + SequenceTypes=SequenceTypes, + do_flatten=do_flatten, +) -> list: + """Flatten a sequence to a non-nested list. + + Same as :func:`flatten`, but it does not handle the single scalar case. + This is slightly more efficient when one knows that the sequence + to flatten can not be a scalar. + """ + result = [] + for item in sequence: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + return result + + +# The SCons "semi-deep" copy. +# +# This makes separate copies of lists (including UserList objects) +# dictionaries (including UserDict objects) and tuples, but just copies +# references to anything else it finds. +# +# A special case is any object that has a __semi_deepcopy__() method, +# which we invoke to create the copy. Currently only used by +# BuilderDict to actually prevent the copy operation (as invalid on that object). +# +# The dispatch table approach used here is a direct rip-off from the +# normal Python copy module. + +def semi_deepcopy_dict(obj, exclude=None) -> dict: + if exclude is None: + exclude = [] + return {k: semi_deepcopy(v) for k, v in obj.items() if k not in exclude} + +def _semi_deepcopy_list(obj) -> list: + return [semi_deepcopy(item) for item in obj] + +def _semi_deepcopy_tuple(obj) -> tuple: + return tuple(map(semi_deepcopy, obj)) + +_semi_deepcopy_dispatch = { + dict: semi_deepcopy_dict, + list: _semi_deepcopy_list, + tuple: _semi_deepcopy_tuple, +} + + +def semi_deepcopy(obj): + copier = _semi_deepcopy_dispatch.get(type(obj)) + if copier: + return copier(obj) + + if hasattr(obj, '__semi_deepcopy__') and callable(obj.__semi_deepcopy__): + return obj.__semi_deepcopy__() + + if isinstance(obj, UserDict): + return obj.__class__(semi_deepcopy_dict(obj)) + + if isinstance(obj, (UserList, deque)): + return obj.__class__(_semi_deepcopy_list(obj)) + + return obj + + +class Proxy: + """A simple generic Proxy class, forwarding all calls to subject. + + This means you can take an object, let's call it `'obj_a`, + and wrap it in this Proxy class, with a statement like this:: + + proxy_obj = Proxy(obj_a) + + Then, if in the future, you do something like this:: + + x = proxy_obj.var1 + + since the :class:`Proxy` class does not have a :attr:`var1` attribute + (but presumably `objA` does), the request actually is equivalent to saying:: + + x = obj_a.var1 + + Inherit from this class to create a Proxy. + + With Python 3.5+ this does *not* work transparently + for :class:`Proxy` subclasses that use special .__*__() method names, + because those names are now bound to the class, not the individual + instances. You now need to know in advance which special method names you + want to pass on to the underlying Proxy object, and specifically delegate + their calls like this:: + + class Foo(Proxy): + __str__ = Delegate('__str__') + """ + + def __init__(self, subject): + """Wrap an object as a Proxy object""" + self._subject = subject + + def __getattr__(self, name): + """Retrieve an attribute from the wrapped object. + + Raises: + AttributeError: if attribute `name` doesn't exist. 
+ """ + return getattr(self._subject, name) + + def get(self): + """Retrieve the entire wrapped object""" + return self._subject + + def __eq__(self, other): + if issubclass(other.__class__, self._subject.__class__): + return self._subject == other + return self.__dict__ == other.__dict__ + + +class Delegate: + """A Python Descriptor class that delegates attribute fetches + to an underlying wrapped subject of a Proxy. Typical use:: + + class Foo(Proxy): + __str__ = Delegate('__str__') + """ + def __init__(self, attribute): + self.attribute = attribute + + def __get__(self, obj, cls): + if isinstance(obj, cls): + return getattr(obj._subject, self.attribute) + + return self + + +# attempt to load the windows registry module: +can_read_reg = False +try: + import winreg + + can_read_reg = True + hkey_mod = winreg + +except ImportError: + class _NoError(Exception): + pass + RegError = _NoError + +if can_read_reg: + HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT + HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE + HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER + HKEY_USERS = hkey_mod.HKEY_USERS + + RegOpenKeyEx = winreg.OpenKeyEx + RegEnumKey = winreg.EnumKey + RegEnumValue = winreg.EnumValue + RegQueryValueEx = winreg.QueryValueEx + RegError = winreg.error + + def RegGetValue(root, key): + r"""Returns a registry value without having to open the key first. + + Only available on Windows platforms with a version of Python that + can read the registry. + + Returns the same thing as :func:`RegQueryValueEx`, except you just + specify the entire path to the value, and don't have to bother + opening the key first. So, instead of:: + + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + r'SOFTWARE\Microsoft\Windows\CurrentVersion') + out = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') + + You can write:: + + out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, + r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir') + """ + # I would use os.path.split here, but it's not a filesystem + # path... 
+ p = key.rfind('\\') + 1 + keyp = key[: p - 1] # -1 to omit trailing slash + val = key[p:] + k = RegOpenKeyEx(root, keyp) + return RegQueryValueEx(k, val) + + +else: + HKEY_CLASSES_ROOT = None + HKEY_LOCAL_MACHINE = None + HKEY_CURRENT_USER = None + HKEY_USERS = None + + def RegGetValue(root, key): + raise OSError + + def RegOpenKeyEx(root, key): + raise OSError + + +if sys.platform == 'win32': + + def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = path.split(os.pathsep) + if pathext is None: + try: + pathext = os.environ['PATHEXT'] + except KeyError: + pathext = '.COM;.EXE;.BAT;.CMD' + if is_String(pathext): + pathext = pathext.split(os.pathsep) + for ext in pathext: + if ext.lower() == file[-len(ext):].lower(): + pathext = [''] + break + if reject is None: + reject = [] + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for p in path: + f = os.path.join(p, file) + for ext in pathext: + fext = f + ext + if os.path.isfile(fext): + try: + reject.index(fext) + except ValueError: + return os.path.normpath(fext) + continue + return None + +elif os.name == 'os2': + + def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = path.split(os.pathsep) + if pathext is None: + pathext = ['.exe', '.cmd'] + for ext in pathext: + if ext.lower() == file[-len(ext):].lower(): + pathext = [''] + break + if reject is None: + reject = [] + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for p in path: + f = os.path.join(p, file) + for ext in pathext: + fext = f + ext + if os.path.isfile(fext): + try: + reject.index(fext) + except ValueError: + return os.path.normpath(fext) + continue + return None + +else: + + def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: + import stat # pylint: disable=import-outside-toplevel + + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = path.split(os.pathsep) + if reject is None: + reject = [] + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for p in path: + f = os.path.join(p, file) + if os.path.isfile(f): + try: + st = os.stat(f) + except OSError: + # os.stat() raises OSError, not IOError if the file + # doesn't exist, so in this case we let IOError get + # raised so as to not mask possibly serious disk or + # network issues. + continue + if stat.S_IMODE(st[stat.ST_MODE]) & 0o111: + try: + reject.index(f) + except ValueError: + return os.path.normpath(f) + continue + return None + +WhereIs.__doc__ = """\ +Return the path to an executable that matches `file`. + +Searches the given `path` for `file`, respecting any filename +extensions `pathext` (on the Windows platform only), and +returns the full path to the matching command. If no +command is found, return ``None``. + +If `path` is not specified, :attr:`os.environ[PATH]` is used. +If `pathext` is not specified, :attr:`os.environ[PATHEXT]` +is used. Will not select any path name or names in the optional +`reject` list. 
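
A brief illustration (not from the patch); the results naturally depend on the machine it runs on:

    from SCons.Util import WhereIs

    print(WhereIs('python3'))                        # e.g. '/usr/bin/python3', or None
    print(WhereIs('python3', path='/nonexistent'))   # None: not on the supplied path
    first = WhereIs('python3')
    print(WhereIs('python3', reject=[first]))        # skip the first hit, if any
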
+""" + + +if sys.platform == 'cygwin': + import subprocess # pylint: disable=import-outside-toplevel + + def get_native_path(path) -> str: + cp = subprocess.run(('cygpath', '-w', path), check=False, stdout=subprocess.PIPE) + return cp.stdout.decode().replace('\n', '') +else: + def get_native_path(path) -> str: + return path + +get_native_path.__doc__ = """\ +Transform an absolute path into a native path for the system. + +In Cygwin, this converts from a Cygwin path to a Windows path, +without regard to whether `path` refers to an existing file +system object. For other platforms, `path` is unchanged. +""" + + +def Split(arg) -> list: + """Returns a list of file names or other objects. + + If `arg` is a string, it will be split on strings of white-space + characters within the string. If `arg` is already a list, the list + will be returned untouched. If `arg` is any other type of object, + it will be returned as a list containing just the object. + + >>> print(Split(" this is a string ")) + ['this', 'is', 'a', 'string'] + >>> print(Split(["stringlist", " preserving ", " spaces "])) + ['stringlist', ' preserving ', ' spaces '] + """ + if is_List(arg) or is_Tuple(arg): + return arg + + if is_String(arg): + return arg.split() + + return [arg] + + +class CLVar(UserList): + """A container for command-line construction variables. + + Forces the use of a list of strings intended as command-line + arguments. Like :class:`collections.UserList`, but the argument + passed to the initializter will be processed by the :func:`Split` + function, which includes special handling for string types: they + will be split into a list of words, not coereced directly to a list. + The same happens if a string is added to a :class:`CLVar`, + which allows doing the right thing with both + :func:`Append`/:func:`Prepend` methods, + as well as with pure Python addition, regardless of whether adding + a list or a string to a construction variable. + + Side effect: spaces will be stripped from individual string + arguments. If you need spaces preserved, pass strings containing + spaces inside a list argument. + + >>> u = UserList("--some --opts and args") + >>> print(len(u), repr(u)) + 22 ['-', '-', 's', 'o', 'm', 'e', ' ', '-', '-', 'o', 'p', 't', 's', ' ', 'a', 'n', 'd', ' ', 'a', 'r', 'g', 's'] + >>> c = CLVar("--some --opts and args") + >>> print(len(c), repr(c)) + 4 ['--some', '--opts', 'and', 'args'] + >>> c += " strips spaces " + >>> print(len(c), repr(c)) + 6 ['--some', '--opts', 'and', 'args', 'strips', 'spaces'] + """ + + def __init__(self, initlist=None): + super().__init__(Split(initlist if initlist is not None else [])) + + def __add__(self, other): + return super().__add__(CLVar(other)) + + def __radd__(self, other): + return super().__radd__(CLVar(other)) + + def __iadd__(self, other): + return super().__iadd__(CLVar(other)) + + def __str__(self): + # Some cases the data can contain Nodes, so make sure they + # processed to string before handing them over to join. + return ' '.join([str(d) for d in self.data]) + + +class Selector(OrderedDict): + """A callable ordered dictionary that maps file suffixes to + dictionary values. We preserve the order in which items are added + so that :func:`get_suffix` calls always return the first suffix added. 
+ """ + def __call__(self, env, source, ext=None): + if ext is None: + try: + ext = source[0].get_suffix() + except IndexError: + ext = "" + try: + return self[ext] + except KeyError: + # Try to perform Environment substitution on the keys of + # the dictionary before giving up. + s_dict = {} + for (k, v) in self.items(): + if k is not None: + s_k = env.subst(k) + if s_k in s_dict: + # We only raise an error when variables point + # to the same suffix. If one suffix is literal + # and a variable suffix contains this literal, + # the literal wins and we don't raise an error. + raise KeyError(s_dict[s_k][0], k, s_k) + s_dict[s_k] = (k, v) + try: + return s_dict[ext][1] + except KeyError: + try: + return self[None] + except KeyError: + return None + + +if sys.platform == 'cygwin': + # On Cygwin, os.path.normcase() lies, so just report back the + # fact that the underlying Windows OS is case-insensitive. + def case_sensitive_suffixes(s1, s2) -> bool: # pylint: disable=unused-argument + return False + +else: + def case_sensitive_suffixes(s1, s2) -> bool: + return os.path.normcase(s1) != os.path.normcase(s2) + + +def adjustixes(fname, pre, suf, ensure_suffix=False) -> str: + """Adjust filename prefixes and suffixes as needed. + + Add `prefix` to `fname` if specified. + Add `suffix` to `fname` if specified and if `ensure_suffix` is ``True`` + """ + + if pre: + path, fn = os.path.split(os.path.normpath(fname)) + + # Handle the odd case where the filename = the prefix. + # In that case, we still want to add the prefix to the file + if not fn.startswith(pre) or fn == pre: + fname = os.path.join(path, pre + fn) + # Only append a suffix if the suffix we're going to add isn't already + # there, and if either we've been asked to ensure the specific suffix + # is present or there's no suffix on it at all. + # Also handle the odd case where the filename = the suffix. + # in that case we still want to append the suffix + if suf and not fname.endswith(suf) and \ + (ensure_suffix or not splitext(fname)[1]): + fname = fname + suf + return fname + + +# From Tim Peters, +# https://code.activestate.com/recipes/52560 +# ASPN: Python Cookbook: Remove duplicates from a sequence +# (Also in the printed Python Cookbook.) +# Updated. This algorithm is used by some scanners and tools. + +def unique(seq): + """Return a list of the elements in seq without duplicates, ignoring order. + + >>> mylist = unique([1, 2, 3, 1, 2, 3]) + >>> print(sorted(mylist)) + [1, 2, 3] + >>> mylist = unique("abcabc") + >>> print(sorted(mylist)) + ['a', 'b', 'c'] + >>> mylist = unique(([1, 2], [2, 3], [1, 2])) + >>> print(sorted(mylist)) + [[1, 2], [2, 3]] + + For best speed, all sequence elements should be hashable. Then + unique() will usually work in linear time. + + If not possible, the sequence elements should enjoy a total + ordering, and if list(s).sort() doesn't raise TypeError it's + assumed that they do enjoy a total ordering. Then unique() will + usually work in O(N*log2(N)) time. + + If that's not possible either, the sequence elements must support + equality-testing. Then unique() will usually work in quadratic time. + """ + + if not seq: + return [] + + # Try using a dict first, as that's the fastest and will usually + # work. If it doesn't work, it will usually fail quickly, so it + # usually doesn't cost much to *try* it. It requires that all the + # sequence elements be hashable, and support equality comparison. 
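# Sketch of Selector in use.  The suffix keys and the _FakeEnv helper are
# invented for illustration; a real call receives a construction
# environment whose subst() expands variables appearing in the keys.
class _FakeEnv:
    def subst(self, s):
        return s

_sel = Selector({'.c': 'compile-c', '.cpp': 'compile-c++', None: 'fallback'})
assert _sel(_FakeEnv(), [], ext='.c') == 'compile-c'        # direct suffix hit
assert _sel(_FakeEnv(), [], ext='.unknown') == 'fallback'   # falls back to None key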
+ # TODO: should be even faster: return(list(set(seq))) + with suppress(TypeError): + return list(dict.fromkeys(seq)) + + # We couldn't hash all the elements (got a TypeError). + # Next fastest is to sort, which brings the equal elements together; + # then duplicates are easy to weed out in a single pass. + # NOTE: Python's list.sort() was designed to be efficient in the + # presence of many duplicate elements. This isn't true of all + # sort functions in all languages or libraries, so this approach + # is more effective in Python than it may be elsewhere. + n = len(seq) + try: + t = sorted(seq) + except TypeError: + pass # move on to the next method + else: + last = t[0] + lasti = i = 1 + while i < n: + if t[i] != last: + t[lasti] = last = t[i] + lasti = lasti + 1 + i = i + 1 + return t[:lasti] + + # Brute force is all that's left. + u = [] + for x in seq: + if x not in u: + u.append(x) + return u + +# Best way (assuming Python 3.7, but effectively 3.6) to remove +# duplicates from a list in while preserving order, according to +# https://stackoverflow.com/questions/480214/how-do-i-remove-duplicates-from-a-list-while-preserving-order/17016257#17016257 +def uniquer_hashables(seq): + return list(dict.fromkeys(seq)) + +# Recipe 19.11 "Reading Lines with Continuation Characters", +# by Alex Martelli, straight from the Python CookBook (2nd edition). +def logical_lines(physical_lines, joiner=''.join): + logical_line = [] + for line in physical_lines: + stripped = line.rstrip() + if stripped.endswith('\\'): + # a line which continues w/the next physical line + logical_line.append(stripped[:-1]) + else: + # a line which does not continue, end of logical line + logical_line.append(line) + yield joiner(logical_line) + logical_line = [] + if logical_line: + # end of sequence implies end of last logical line + yield joiner(logical_line) + + +class LogicalLines: + """ Wrapper class for the logical_lines method. + + Allows us to read all "logical" lines at once from a given file object. + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + + def readlines(self): + return list(logical_lines(self.fileobj)) + + +class UniqueList(UserList): + """A list which maintains uniqueness. + + Uniquing is lazy: rather than being assured on list changes, it is fixed + up on access by those methods which need to act on a unique list to be + correct. That means things like "in" don't have to eat the uniquing time. 
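# Quick sketch of logical_lines(): physical lines ending in a backslash
# are joined with the following line.  The sample input is made up.
_phys = [
    "CFLAGS = -O2 \\\n",
    "         -Wall\n",
    "LIBS = -lm\n",
]
_logical = list(logical_lines(_phys))
assert len(_logical) == 2
assert _logical[0].endswith("-Wall\n") and "-O2" in _logical[0]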
+ """ + def __init__(self, initlist=None): + super().__init__(initlist) + self.unique = True + + def __make_unique(self): + if not self.unique: + self.data = uniquer_hashables(self.data) + self.unique = True + + def __repr__(self): + self.__make_unique() + return super().__repr__() + + def __lt__(self, other): + self.__make_unique() + return super().__lt__(other) + + def __le__(self, other): + self.__make_unique() + return super().__le__(other) + + def __eq__(self, other): + self.__make_unique() + return super().__eq__(other) + + def __ne__(self, other): + self.__make_unique() + return super().__ne__(other) + + def __gt__(self, other): + self.__make_unique() + return super().__gt__(other) + + def __ge__(self, other): + self.__make_unique() + return super().__ge__(other) + + # __contains__ doesn't need to worry about uniquing, inherit + + def __len__(self): + self.__make_unique() + return super().__len__() + + def __getitem__(self, i): + self.__make_unique() + return super().__getitem__(i) + + def __setitem__(self, i, item): + super().__setitem__(i, item) + self.unique = False + + # __delitem__ doesn't need to worry about uniquing, inherit + + def __add__(self, other): + result = super().__add__(other) + result.unique = False + return result + + def __radd__(self, other): + result = super().__radd__(other) + result.unique = False + return result + + def __iadd__(self, other): + result = super().__iadd__(other) + result.unique = False + return result + + def __mul__(self, other): + result = super().__mul__(other) + result.unique = False + return result + + def __rmul__(self, other): + result = super().__rmul__(other) + result.unique = False + return result + + def __imul__(self, other): + result = super().__imul__(other) + result.unique = False + return result + + def append(self, item): + super().append(item) + self.unique = False + + def insert(self, i, item): + super().insert(i, item) + self.unique = False + + def count(self, item): + self.__make_unique() + return super().count(item) + + def index(self, item, *args): + self.__make_unique() + return super().index(item, *args) + + def reverse(self): + self.__make_unique() + super().reverse() + + # TODO: Py3.8: def sort(self, /, *args, **kwds): + def sort(self, *args, **kwds): + self.__make_unique() + return super().sort(*args, **kwds) + + def extend(self, other): + super().extend(other) + self.unique = False + + +class Unbuffered: + """A proxy that wraps a file object, flushing after every write. + + Delegates everything else to the wrapped object. + """ + def __init__(self, file): + self.file = file + + def write(self, arg): + # Stdout might be connected to a pipe that has been closed + # by now. The most likely reason for the pipe being closed + # is that the user has press ctrl-c. It this is the case, + # then SCons is currently shutdown. We therefore ignore + # IOError's here so that SCons can continue and shutdown + # properly so that the .sconsign is correctly written + # before SCons exits. 
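# Sketch of UniqueList's lazy de-duplication with invented values:
# duplicates are tolerated on writes and only weeded out when a reading
# method (len, indexing, comparison, ...) needs the unique view.
_ul = UniqueList([1, 2, 3])
_ul.append(2)
_ul += [3, 4]
assert len(_ul) == 4              # forces the lazy uniquing
assert list(_ul) == [1, 2, 3, 4]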
+ with suppress(IOError): + self.file.write(arg) + self.file.flush() + + def writelines(self, arg): + with suppress(IOError): + self.file.writelines(arg) + self.file.flush() + + def __getattr__(self, attr): + return getattr(self.file, attr) + +def make_path_relative(path) -> str: + """Converts an absolute path name to a relative pathname.""" + + if os.path.isabs(path): + drive_s, path = os.path.splitdrive(path) + + if not drive_s: + path = re.compile(r"/*(.*)").findall(path)[0] + else: + path = path[1:] + + assert not os.path.isabs(path), path + return path + + +def silent_intern(x): + """ + Perform :mod:`sys.intern` on the passed argument and return the result. + If the input is ineligible for interning the original argument is + returned and no exception is thrown. + """ + try: + return sys.intern(x) + except TypeError: + return x + + +def cmp(a, b) -> bool: + """A cmp function because one is no longer available in python3.""" + return (a > b) - (a < b) + + +def print_time(): + """Hack to return a value from Main if can't import Main.""" + # pylint: disable=redefined-outer-name,import-outside-toplevel + from SCons.Script.Main import print_time + return print_time + + +def wait_for_process_to_die(pid): + """ + Wait for specified process to die, or alternatively kill it + NOTE: This function operates best with psutil pypi package + TODO: Add timeout which raises exception + """ + # wait for the process to fully killed + try: + import psutil # pylint: disable=import-outside-toplevel + while True: + if pid not in [proc.pid for proc in psutil.process_iter()]: + break + time.sleep(0.1) + except ImportError: + # if psutil is not installed we can do this the hard way + while True: + if sys.platform == 'win32': + import ctypes # pylint: disable=import-outside-toplevel + PROCESS_QUERY_INFORMATION = 0x1000 + processHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION, 0, pid) + if processHandle == 0: + break + ctypes.windll.kernel32.CloseHandle(processHandle) + time.sleep(0.1) + else: + try: + os.kill(pid, 0) + except OSError: + break + time.sleep(0.1) + +# From: https://stackoverflow.com/questions/1741972/how-to-use-different-formatters-with-the-same-logging-handler-in-python +class DispatchingFormatter(Formatter): + + def __init__(self, formatters, default_formatter): + self._formatters = formatters + self._default_formatter = default_formatter + + def format(self, record): + formatter = self._formatters.get(record.name, self._default_formatter) + return formatter.format(record) + + +def sanitize_shell_env(execution_env: dict) -> dict: + """Sanitize all values in *execution_env* + + The execution environment (typically comes from (env['ENV']) is + propagated to the shell, and may need to be cleaned first. + + Args: + execution_env: The shell environment variables to be propagated + to the spawned shell. + + Returns: + sanitized dictionary of env variables (similar to what you'd get + from :data:`os.environ`) + """ + # Ensure that the ENV values are all strings: + new_env = {} + for key, value in execution_env.items(): + if is_List(value): + # If the value is a list, then we assume it is a path list, + # because that's a pretty common list-like value to stick + # in an environment variable: + value = flatten_sequence(value) + new_env[key] = os.pathsep.join(map(str, value)) + else: + # It's either a string or something else. 
If it isn't a + # string or a list, then we just coerce it to a string, which + # is the proper way to handle Dir and File instances and will + # produce something reasonable for just about everything else: + new_env[key] = str(value) + return new_env + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Util/types.py scons-4.5.2+dfsg/SCons/Util/types.py --- scons-4.4.0+dfsg/SCons/Util/types.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Util/types.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,315 @@ +# SPDX-License-Identifier: MIT +# +# Copyright The SCons Foundation + +"""Various SCons utility functions + +Routines which check types and do type conversions. +""" + +import os +import pprint +import re +from typing import Optional + +from collections import UserDict, UserList, UserString, deque +from collections.abc import MappingView, Iterable + +# Functions for deciding if things are like various types, mainly to +# handle UserDict, UserList and UserString like their underlying types. +# +# Yes, all of this manual testing breaks polymorphism, and the real +# Pythonic way to do all of this would be to just try it and handle the +# exception, but handling the exception when it's not the right type is +# often too slow. + +# A trick is used to speed up these functions. Default arguments are +# used to take a snapshot of the global functions and constants used +# by these functions. This transforms accesses to global variables into +# local variable accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL). +# Since checkers dislike this, it's now annotated for pylint, to flag +# (mostly for other readers of this code) we're doing this intentionally. +# TODO: experts affirm this is still faster, but maybe check if worth it? + +DictTypes = (dict, UserDict) +ListTypes = (list, UserList, deque) + +# With Python 3, there are view types that are sequences. Other interesting +# sequences are range and bytearray. What we don't want is strings: while +# they are iterable sequences, in SCons usage iterating over a string is +# almost never what we want. So basically iterable-but-not-string: +SequenceTypes = (list, tuple, deque, UserList, MappingView) + +# Note that profiling data shows a speed-up when comparing +# explicitly with str instead of simply comparing +# with basestring. (at least on Python 2.5.1) +# TODO: PY3 check this benchmarking is still correct. +StringTypes = (str, UserString) + +# Empirically, it is faster to check explicitly for str than for basestring. 
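# Tiny sketch of the default-argument trick described above: binding a
# global (or builtin) as a default argument turns LOAD_GLOBAL lookups
# into local LOAD_FAST accesses inside the hot function.  _is_str is a
# made-up example, not an SCons API.
def _is_str(obj, isinstance=isinstance, _strings=(str, UserString)) -> bool:
    # 'isinstance' and '_strings' are now locals, resolved at call time
    # without touching the module globals or builtins dictionaries.
    return isinstance(obj, _strings)

assert _is_str("x") and _is_str(UserString("x")) and not _is_str(42)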
+BaseStringTypes = str + + +def is_Dict( # pylint: disable=redefined-outer-name,redefined-builtin + obj, isinstance=isinstance, DictTypes=DictTypes +) -> bool: + """Check if object is a dict.""" + return isinstance(obj, DictTypes) + + +def is_List( # pylint: disable=redefined-outer-name,redefined-builtin + obj, isinstance=isinstance, ListTypes=ListTypes +) -> bool: + """Check if object is a list.""" + return isinstance(obj, ListTypes) + + +def is_Sequence( # pylint: disable=redefined-outer-name,redefined-builtin + obj, isinstance=isinstance, SequenceTypes=SequenceTypes +) -> bool: + """Check if object is a sequence.""" + return isinstance(obj, SequenceTypes) + + +def is_Tuple( # pylint: disable=redefined-builtin + obj, isinstance=isinstance, tuple=tuple +) -> bool: + """Check if object is a tuple.""" + return isinstance(obj, tuple) + + +def is_String( # pylint: disable=redefined-outer-name,redefined-builtin + obj, isinstance=isinstance, StringTypes=StringTypes +) -> bool: + """Check if object is a string.""" + return isinstance(obj, StringTypes) + + +def is_Scalar( # pylint: disable=redefined-outer-name,redefined-builtin + obj, isinstance=isinstance, StringTypes=StringTypes, Iterable=Iterable, +) -> bool: + """Check if object is a scalar: not a container or iterable.""" + # Profiling shows that there is an impressive speed-up of 2x + # when explicitly checking for strings instead of just not + # sequence when the argument (i.e. obj) is already a string. + # But, if obj is a not string then it is twice as fast to + # check only for 'not sequence'. The following code therefore + # assumes that the obj argument is a string most of the time. + # Update: now using collections.abc.Iterable for the 2nd check. + # Note: None is considered a "scalar" for this check, which is correct + # for the usage in SCons.Environment._add_cppdefines. + return isinstance(obj, StringTypes) or not isinstance(obj, Iterable) + + +# From Dinu C. Gherman, +# Python Cookbook, second edition, recipe 6.17, p. 277. +# Also: https://code.activestate.com/recipes/68205 +# ASPN: Python Cookbook: Null Object Design Pattern + + +class Null: + """Null objects always and reliably 'do nothing'.""" + + def __new__(cls, *args, **kwargs): + if '_instance' not in vars(cls): + cls._instance = super(Null, cls).__new__(cls, *args, **kwargs) + return cls._instance + + def __init__(self, *args, **kwargs): + pass + + def __call__(self, *args, **kwargs): + return self + + def __repr__(self): + return f"Null(0x{id(self):08X})" + + def __bool__(self): + return False + + def __getattr__(self, name): + return self + + def __setattr__(self, name, value): + return self + + def __delattr__(self, name): + return self + + +class NullSeq(Null): + """A Null object that can also be iterated over.""" + + def __len__(self): + return 0 + + def __iter__(self): + return iter(()) + + def __getitem__(self, i): + return self + + def __delitem__(self, i): + return self + + def __setitem__(self, i, v): + return self + + +def to_bytes(s) -> bytes: + """Convert object to bytes.""" + if s is None: + return b'None' + if isinstance(s, (bytes, bytearray)): + # if already bytes return. + return s + return bytes(s, 'utf-8') + + +def to_str(s) -> str: + """Convert object to string.""" + if s is None: + return 'None' + if is_String(s): + return s + return str(s, 'utf-8') + + +# Generic convert-to-string functions. The wrapper +# to_String_for_signature() will use a for_signature() method if the +# specified object has one. 
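# Sketch of the Null/NullSeq singletons defined above: every operation
# quietly returns the same do-nothing object, and NullSeq additionally
# behaves like an empty sequence.  The attribute names are arbitrary.
_null = Null()
assert Null() is _null                   # one instance per class
assert not _null                         # always falsy
assert _null.anything.more() is _null    # chains silently
assert len(NullSeq()) == 0 and list(NullSeq()) == []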
+ + +def to_String( # pylint: disable=redefined-outer-name,redefined-builtin + obj, + isinstance=isinstance, + str=str, + UserString=UserString, + BaseStringTypes=BaseStringTypes, +) -> str: + """Return a string version of obj.""" + if isinstance(obj, BaseStringTypes): + # Early out when already a string! + return obj + + if isinstance(obj, UserString): + # obj.data can only be a regular string. Please see the UserString initializer. + return obj.data + + return str(obj) + + +def to_String_for_subst( # pylint: disable=redefined-outer-name,redefined-builtin + obj, + isinstance=isinstance, + str=str, + BaseStringTypes=BaseStringTypes, + SequenceTypes=SequenceTypes, + UserString=UserString, +) -> str: + """Return a string version of obj for subst usage.""" + # Note that the test cases are sorted by order of probability. + if isinstance(obj, BaseStringTypes): + return obj + + if isinstance(obj, SequenceTypes): + return ' '.join([to_String_for_subst(e) for e in obj]) + + if isinstance(obj, UserString): + # obj.data can only a regular string. Please see the UserString initializer. + return obj.data + + return str(obj) + + +def to_String_for_signature( # pylint: disable=redefined-outer-name,redefined-builtin + obj, to_String_for_subst=to_String_for_subst, AttributeError=AttributeError, +) -> str: + """Return a string version of obj for signature usage. + + Like :func:`to_String_for_subst` but has special handling for + scons objects that have a :meth:`for_signature` method, and for dicts. + """ + try: + f = obj.for_signature + except AttributeError: + if isinstance(obj, dict): + # pprint will output dictionary in key sorted order + # with py3.5 the order was randomized. Depending on dict order + # which was undefined until py3.6 (where it's by insertion order) + # was not wise. + # TODO: Change code when floor is raised to PY36 + return pprint.pformat(obj, width=1000000) + return to_String_for_subst(obj) + else: + return f() + + +def get_env_bool(env, name, default=False) -> bool: + """Convert a construction variable to bool. + + If the value of *name* in *env* is 'true', 'yes', 'y', 'on' (case + insensitive) or anything convertible to int that yields non-zero then + return ``True``; if 'false', 'no', 'n', 'off' (case insensitive) + or a number that converts to integer zero return ``False``. + Otherwise, return `default`. + + Args: + env: construction environment, or any dict-like object + name: name of the variable + default: value to return if *name* not in *env* or cannot + be converted (default: False) + + Returns: + the "truthiness" of `name` + """ + try: + var = env[name] + except KeyError: + return default + try: + return bool(int(var)) + except ValueError: + if str(var).lower() in ('true', 'yes', 'y', 'on'): + return True + + if str(var).lower() in ('false', 'no', 'n', 'off'): + return False + + return default + + +def get_os_env_bool(name, default=False) -> bool: + """Convert an environment variable to bool. + + Conversion is the same as for :func:`get_env_bool`. + """ + return get_env_bool(os.environ, name, default) + + +_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$') + + +def get_environment_var(varstr) -> Optional[str]: + """Return undecorated construction variable string. + + Determine if `varstr` looks like a reference + to a single environment variable, like `"$FOO"` or `"${FOO}"`. + If so, return that variable with no decorations, like `"FOO"`. + If not, return `None`. 
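# Sketch of get_env_bool() truthiness handling, using a plain dict as a
# stand-in for a construction environment; the variable names are invented.
_env = {'FANCY': 'yes', 'VERBOSE': '0', 'WEIRD': 'maybe'}
assert get_env_bool(_env, 'FANCY') is True
assert get_env_bool(_env, 'VERBOSE') is False
assert get_env_bool(_env, 'WEIRD', default=True) is True   # unparsable -> default
assert get_env_bool(_env, 'MISSING') is False              # absent -> default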
+ """ + mo = _get_env_var.match(to_String(varstr)) + if mo: + var = mo.group(1) + if var[0] == '{': + return var[1:-1] + return var + + return None + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/Util.py scons-4.5.2+dfsg/SCons/Util.py --- scons-4.4.0+dfsg/SCons/Util.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Util.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2166 +0,0 @@ -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -"""Various SCons utility functions.""" - -import copy -import hashlib -import os -import pprint -import re -import sys -import time -from collections import UserDict, UserList, UserString, OrderedDict -from collections.abc import MappingView -from contextlib import suppress -from types import MethodType, FunctionType -from typing import Optional, Union - -# Note: Util module cannot import other bits of SCons globally without getting -# into import loops. Both the below modules import SCons.Util early on. -# --> SCons.Warnings -# --> SCons.Errors -# Thus the local imports, which are annotated for pylint to show we mean it. - - -PYPY = hasattr(sys, 'pypy_translation_info') - -# this string will be hashed if a Node refers to a file that doesn't exist -# in order to distinguish from a file that exists but is empty. -NOFILE = "SCONS_MAGIC_MISSING_FILE_STRING" - -# unused? -def dictify(keys, values, result=None) -> dict: - if result is None: - result = {} - result.update(zip(keys, values)) - return result - -_ALTSEP = os.altsep -if _ALTSEP is None and sys.platform == 'win32': - # My ActivePython 2.0.1 doesn't set os.altsep! What gives? - _ALTSEP = '/' -if _ALTSEP: - def rightmost_separator(path, sep): - return max(path.rfind(sep), path.rfind(_ALTSEP)) -else: - def rightmost_separator(path, sep): - return path.rfind(sep) - -# First two from the Python Cookbook, just for completeness. -# (Yeah, yeah, YAGNI...) 
-def containsAny(s, pat) -> bool: - """Check whether string `s` contains ANY of the items in `pat`.""" - return any(c in s for c in pat) - -def containsAll(s, pat) -> bool: - """Check whether string `s` contains ALL of the items in `pat`.""" - return all(c in s for c in pat) - -def containsOnly(s, pat) -> bool: - """Check whether string `s` contains ONLY items in `pat`.""" - for c in s: - if c not in pat: - return False - return True - - -# TODO: Verify this method is STILL faster than os.path.splitext -def splitext(path) -> tuple: - """Split `path` into a (root, ext) pair. - - Same as :mod:`os.path.splitext` but faster. - """ - sep = rightmost_separator(path, os.sep) - dot = path.rfind('.') - # An ext is only real if it has at least one non-digit char - if dot > sep and not path[dot + 1:].isdigit(): - return path[:dot], path[dot:] - - return path, "" - -def updrive(path) -> str: - """Make the drive letter (if any) upper case. - - This is useful because Windows is inconsistent on the case - of the drive letter, which can cause inconsistencies when - calculating command signatures. - """ - drive, rest = os.path.splitdrive(path) - if drive: - path = drive.upper() + rest - return path - -class NodeList(UserList): - """A list of Nodes with special attribute retrieval. - - Unlike an ordinary list, access to a member's attribute returns a - `NodeList` containing the same attribute for each member. Although - this can hold any object, it is intended for use when processing - Nodes, where fetching an attribute of each member is very commone, - for example getting the content signature of each node. The term - "attribute" here includes the string representation. - - Example: - - >>> someList = NodeList([' foo ', ' bar ']) - >>> someList.strip() - ['foo', 'bar'] - """ - - def __bool__(self): - return bool(self.data) - - def __str__(self): - return ' '.join(map(str, self.data)) - - def __iter__(self): - return iter(self.data) - - def __call__(self, *args, **kwargs) -> 'NodeList': - result = [x(*args, **kwargs) for x in self.data] - return self.__class__(result) - - def __getattr__(self, name) -> 'NodeList': - """Returns a NodeList of `name` from each member.""" - result = [getattr(x, name) for x in self.data] - return self.__class__(result) - - def __getitem__(self, index): - """Returns one item, forces a `NodeList` if `index` is a slice.""" - # TODO: annotate return how? Union[] - don't know type of single item - if isinstance(index, slice): - return self.__class__(self.data[index]) - return self.data[index] - - -_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$') - -def get_environment_var(varstr) -> Optional[str]: - """Return undecorated construction variable string. - - Determine if `varstr` looks like a reference - to a single environment variable, like `"$FOO"` or `"${FOO}"`. - If so, return that variable with no decorations, like `"FOO"`. - If not, return `None`. - """ - - mo = _get_env_var.match(to_String(varstr)) - if mo: - var = mo.group(1) - if var[0] == '{': - return var[1:-1] - return var - - return None - - -class DisplayEngine: - """A callable class used to display SCons messages.""" - - print_it = True - - def __call__(self, text, append_newline=1): - if not self.print_it: - return - - if append_newline: - text = text + '\n' - - try: - sys.stdout.write(str(text)) - except IOError: - # Stdout might be connected to a pipe that has been closed - # by now. The most likely reason for the pipe being closed - # is that the user has press ctrl-c. 
It this is the case, - # then SCons is currently shutdown. We therefore ignore - # IOError's here so that SCons can continue and shutdown - # properly so that the .sconsign is correctly written - # before SCons exits. - pass - - def set_mode(self, mode): - self.print_it = mode - - -# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value) -def render_tree(root, child_func, prune=0, margin=[0], visited=None): - """Render a tree of nodes into an ASCII tree view. - - Args: - root: the root node of the tree - child_func: the function called to get the children of a node - prune: don't visit the same node twice - margin: the format of the left margin to use for children of `root`. - 1 results in a pipe, and 0 results in no pipe. - visited: a dictionary of visited nodes in the current branch if - `prune` is 0, or in the whole tree if `prune` is 1. - """ - - rname = str(root) - - # Initialize 'visited' dict, if required - if visited is None: - visited = {} - - children = child_func(root) - retval = "" - for pipe in margin[:-1]: - if pipe: - retval = retval + "| " - else: - retval = retval + " " - - if rname in visited: - return retval + "+-[" + rname + "]\n" - - retval = retval + "+-" + rname + "\n" - if not prune: - visited = copy.copy(visited) - visited[rname] = True - - for i, child in enumerate(children): - margin.append(i < len(children)-1) - retval = retval + render_tree(child, child_func, prune, margin, visited) - margin.pop() - - return retval - -def IDX(n) -> bool: - """Generate in index into strings from the tree legends. - - These are always a choice between two, so bool works fine. - """ - return bool(n) - -# unicode line drawing chars: -BOX_HORIZ = chr(0x2500) # '─' -BOX_VERT = chr(0x2502) # '│' -BOX_UP_RIGHT = chr(0x2514) # '└' -BOX_DOWN_RIGHT = chr(0x250c) # '┌' -BOX_DOWN_LEFT = chr(0x2510) # '┐' -BOX_UP_LEFT = chr(0x2518) # '┘' -BOX_VERT_RIGHT = chr(0x251c) # '├' -BOX_HORIZ_DOWN = chr(0x252c) # '┬' - - -# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value) -def print_tree( - root, - child_func, - prune=0, - showtags=False, - margin=[0], - visited=None, - lastChild=False, - singleLineDraw=False, -): - """Print a tree of nodes. - - This is like func:`render_tree`, except it prints lines directly instead - of creating a string representation in memory, so that huge trees can - be handled. - - Args: - root: the root node of the tree - child_func: the function called to get the children of a node - prune: don't visit the same node twice - showtags: print status information to the left of each node line - margin: the format of the left margin to use for children of `root`. - 1 results in a pipe, and 0 results in no pipe. - visited: a dictionary of visited nodes in the current branch if - prune` is 0, or in the whole tree if `prune` is 1. - singleLineDraw: use line-drawing characters rather than ASCII. 
- """ - - rname = str(root) - - # Initialize 'visited' dict, if required - if visited is None: - visited = {} - - if showtags: - - if showtags == 2: - legend = (' E = exists\n' + - ' R = exists in repository only\n' + - ' b = implicit builder\n' + - ' B = explicit builder\n' + - ' S = side effect\n' + - ' P = precious\n' + - ' A = always build\n' + - ' C = current\n' + - ' N = no clean\n' + - ' H = no cache\n' + - '\n') - sys.stdout.write(legend) - - tags = [ - '[', - ' E'[IDX(root.exists())], - ' R'[IDX(root.rexists() and not root.exists())], - ' BbB'[ - [0, 1][IDX(root.has_explicit_builder())] + - [0, 2][IDX(root.has_builder())] - ], - ' S'[IDX(root.side_effect)], - ' P'[IDX(root.precious)], - ' A'[IDX(root.always_build)], - ' C'[IDX(root.is_up_to_date())], - ' N'[IDX(root.noclean)], - ' H'[IDX(root.nocache)], - ']' - ] - - else: - tags = [] - - def MMM(m): - if singleLineDraw: - return [" ", BOX_VERT + " "][m] - - return [" ", "| "][m] - - margins = list(map(MMM, margin[:-1])) - children = child_func(root) - cross = "+-" - if singleLineDraw: - cross = BOX_VERT_RIGHT + BOX_HORIZ # sign used to point to the leaf. - # check if this is the last leaf of the branch - if lastChild: - #if this if the last leaf, then terminate: - cross = BOX_UP_RIGHT + BOX_HORIZ # sign for the last leaf - - # if this branch has children then split it - if children: - # if it's a leaf: - if prune and rname in visited and children: - cross += BOX_HORIZ - else: - cross += BOX_HORIZ_DOWN - - if prune and rname in visited and children: - sys.stdout.write(''.join(tags + margins + [cross,'[', rname, ']']) + '\n') - return - - sys.stdout.write(''.join(tags + margins + [cross, rname]) + '\n') - - visited[rname] = 1 - - # if this item has children: - if children: - margin.append(1) # Initialize margin with 1 for vertical bar. - idx = IDX(showtags) - _child = 0 # Initialize this for the first child. - for C in children[:-1]: - _child = _child + 1 # number the children - print_tree( - C, - child_func, - prune, - idx, - margin, - visited, - (len(children) - _child) <= 0, - singleLineDraw, - ) - # margins are with space (index 0) because we arrived to the last child. - margin[-1] = 0 - # for this call child and nr of children needs to be set 0, to signal the second phase. - print_tree(children[-1], child_func, prune, idx, margin, visited, True, singleLineDraw) - margin.pop() # destroy the last margin added - - -# Functions for deciding if things are like various types, mainly to -# handle UserDict, UserList and UserString like their underlying types. -# -# Yes, all of this manual testing breaks polymorphism, and the real -# Pythonic way to do all of this would be to just try it and handle the -# exception, but handling the exception when it's not the right type is -# often too slow. - -# We are using the following trick to speed up these -# functions. Default arguments are used to take a snapshot of -# the global functions and constants used by these functions. This -# transforms accesses to global variable into local variables -# accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL). -# Since checkers dislike this, it's now annotated for pylint to flag -# (mostly for other readers of this code) we're doing this intentionally. -# TODO: PY3 check these are still valid choices for all of these funcs. - -DictTypes = (dict, UserDict) -ListTypes = (list, UserList) - -# Handle getting dictionary views. 
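# Sketch of render_tree() as it existed in the SCons/Util.py module being
# removed above.  The toy dependency mapping and the _children() helper
# are invented purely to show the (root, child_func) calling convention.
_deps = {'prog': ['main.o', 'util.o'], 'main.o': ['main.c'],
         'util.o': ['util.c'], 'main.c': [], 'util.c': []}

def _children(node):
    return _deps[str(node)]

print(render_tree('prog', _children))
# +-prog
#   +-main.o
#   | +-main.c
#   +-util.o
#     +-util.c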
-SequenceTypes = (list, tuple, UserList, MappingView) - -# Note that profiling data shows a speed-up when comparing -# explicitly with str instead of simply comparing -# with basestring. (at least on Python 2.5.1) -# TODO: PY3 check this benchmarking is still correct. -StringTypes = (str, UserString) - -# Empirically, it is faster to check explicitly for str than for basestring. -BaseStringTypes = str - -def is_Dict( # pylint: disable=redefined-outer-name,redefined-builtin - obj, isinstance=isinstance, DictTypes=DictTypes -) -> bool: - return isinstance(obj, DictTypes) - - -def is_List( # pylint: disable=redefined-outer-name,redefined-builtin - obj, isinstance=isinstance, ListTypes=ListTypes -) -> bool: - return isinstance(obj, ListTypes) - - -def is_Sequence( # pylint: disable=redefined-outer-name,redefined-builtin - obj, isinstance=isinstance, SequenceTypes=SequenceTypes -) -> bool: - return isinstance(obj, SequenceTypes) - - -def is_Tuple( # pylint: disable=redefined-builtin - obj, isinstance=isinstance, tuple=tuple -) -> bool: - return isinstance(obj, tuple) - - -def is_String( # pylint: disable=redefined-outer-name,redefined-builtin - obj, isinstance=isinstance, StringTypes=StringTypes -) -> bool: - return isinstance(obj, StringTypes) - - -def is_Scalar( # pylint: disable=redefined-outer-name,redefined-builtin - obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes -) -> bool: - - # Profiling shows that there is an impressive speed-up of 2x - # when explicitly checking for strings instead of just not - # sequence when the argument (i.e. obj) is already a string. - # But, if obj is a not string then it is twice as fast to - # check only for 'not sequence'. The following code therefore - # assumes that the obj argument is a string most of the time. - return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes) - - -def do_flatten( - sequence, - result, - isinstance=isinstance, - StringTypes=StringTypes, - SequenceTypes=SequenceTypes, -): # pylint: disable=redefined-outer-name,redefined-builtin - for item in sequence: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - - -def flatten( # pylint: disable=redefined-outer-name,redefined-builtin - obj, - isinstance=isinstance, - StringTypes=StringTypes, - SequenceTypes=SequenceTypes, - do_flatten=do_flatten, -) -> list: - """Flatten a sequence to a non-nested list. - - Converts either a single scalar or a nested sequence to a non-nested list. - Note that :func:`flatten` considers strings - to be scalars instead of sequences like pure Python would. - """ - if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes): - return [obj] - result = [] - for item in obj: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - return result - - -def flatten_sequence( # pylint: disable=redefined-outer-name,redefined-builtin - sequence, - isinstance=isinstance, - StringTypes=StringTypes, - SequenceTypes=SequenceTypes, - do_flatten=do_flatten, -) -> list: - """Flatten a sequence to a non-nested list. - - Same as :func:`flatten`, but it does not handle the single scalar case. - This is slightly more efficient when one knows that the sequence - to flatten can not be a scalar. 
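# Sketch of flatten() vs flatten_sequence() from the removed module:
# strings count as scalars, so they are never exploded into characters.
# The nested sample data is invented.
assert flatten('one') == ['one']                                   # scalar case
assert flatten(['a', ['b', ('c', 'd')], 'e']) == ['a', 'b', 'c', 'd', 'e']
assert flatten_sequence([['a'], ('b', ['c'])]) == ['a', 'b', 'c']  # no scalar case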
- """ - result = [] - for item in sequence: - if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): - result.append(item) - else: - do_flatten(item, result) - return result - -# Generic convert-to-string functions. The wrapper -# to_String_for_signature() will use a for_signature() method if the -# specified object has one. - -def to_String( # pylint: disable=redefined-outer-name,redefined-builtin - obj, - isinstance=isinstance, - str=str, - UserString=UserString, - BaseStringTypes=BaseStringTypes, -) -> str: - """Return a string version of obj.""" - - if isinstance(obj, BaseStringTypes): - # Early out when already a string! - return obj - - if isinstance(obj, UserString): - # obj.data can only be a regular string. Please see the UserString initializer. - return obj.data - - return str(obj) - -def to_String_for_subst( # pylint: disable=redefined-outer-name,redefined-builtin - obj, - isinstance=isinstance, - str=str, - BaseStringTypes=BaseStringTypes, - SequenceTypes=SequenceTypes, - UserString=UserString, -) -> str: - """Return a string version of obj for subst usage.""" - - # Note that the test cases are sorted by order of probability. - if isinstance(obj, BaseStringTypes): - return obj - - if isinstance(obj, SequenceTypes): - return ' '.join([to_String_for_subst(e) for e in obj]) - - if isinstance(obj, UserString): - # obj.data can only a regular string. Please see the UserString initializer. - return obj.data - - return str(obj) - -def to_String_for_signature( # pylint: disable=redefined-outer-name,redefined-builtin - obj, to_String_for_subst=to_String_for_subst, AttributeError=AttributeError -) -> str: - """Return a string version of obj for signature usage. - - Like :func:`to_String_for_subst` but has special handling for - scons objects that have a :meth:`for_signature` method, and for dicts. - """ - - try: - f = obj.for_signature - except AttributeError: - if isinstance(obj, dict): - # pprint will output dictionary in key sorted order - # with py3.5 the order was randomized. In general depending on dictionary order - # which was undefined until py3.6 (where it's by insertion order) was not wise. - # TODO: Change code when floor is raised to PY36 - return pprint.pformat(obj, width=1000000) - return to_String_for_subst(obj) - else: - return f() - - -# The SCons "semi-deep" copy. -# -# This makes separate copies of lists (including UserList objects) -# dictionaries (including UserDict objects) and tuples, but just copies -# references to anything else it finds. -# -# A special case is any object that has a __semi_deepcopy__() method, -# which we invoke to create the copy. Currently only used by -# BuilderDict to actually prevent the copy operation (as invalid on that object). -# -# The dispatch table approach used here is a direct rip-off from the -# normal Python copy module. 
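# Sketch of the "semi-deep" copy described in the comment above (the
# semi_deepcopy helpers are defined just below): containers are copied,
# but the leaf objects inside them are shared rather than duplicated.
# The sample dictionary is invented.
_leaf = object()
_orig = {'flags': ['-O2', '-g'], 'leaf': _leaf}
_copy = semi_deepcopy(_orig)
assert _copy == _orig and _copy is not _orig
assert _copy['flags'] is not _orig['flags']   # nested list is copied
assert _copy['leaf'] is _orig['leaf']         # leaf reference is shared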
- -def semi_deepcopy_dict(obj, exclude=None) -> dict: - if exclude is None: - exclude = [] - return {k: semi_deepcopy(v) for k, v in obj.items() if k not in exclude} - -def _semi_deepcopy_list(obj) -> list: - return [semi_deepcopy(item) for item in obj] - -def _semi_deepcopy_tuple(obj) -> tuple: - return tuple(map(semi_deepcopy, obj)) - -_semi_deepcopy_dispatch = { - dict: semi_deepcopy_dict, - list: _semi_deepcopy_list, - tuple: _semi_deepcopy_tuple, -} - -def semi_deepcopy(obj): - copier = _semi_deepcopy_dispatch.get(type(obj)) - if copier: - return copier(obj) - - if hasattr(obj, '__semi_deepcopy__') and callable(obj.__semi_deepcopy__): - return obj.__semi_deepcopy__() - - if isinstance(obj, UserDict): - return obj.__class__(semi_deepcopy_dict(obj)) - - if isinstance(obj, UserList): - return obj.__class__(_semi_deepcopy_list(obj)) - - return obj - - -class Proxy: - """A simple generic Proxy class, forwarding all calls to subject. - - This means you can take an object, let's call it `'obj_a`, - and wrap it in this Proxy class, with a statement like this:: - - proxy_obj = Proxy(obj_a) - - Then, if in the future, you do something like this:: - - x = proxy_obj.var1 - - since the :class:`Proxy` class does not have a :attr:`var1` attribute - (but presumably `objA` does), the request actually is equivalent to saying:: - - x = obj_a.var1 - - Inherit from this class to create a Proxy. - - With Python 3.5+ this does *not* work transparently - for :class:`Proxy` subclasses that use special .__*__() method names, - because those names are now bound to the class, not the individual - instances. You now need to know in advance which special method names you - want to pass on to the underlying Proxy object, and specifically delegate - their calls like this:: - - class Foo(Proxy): - __str__ = Delegate('__str__') - """ - - def __init__(self, subject): - """Wrap an object as a Proxy object""" - self._subject = subject - - def __getattr__(self, name): - """Retrieve an attribute from the wrapped object. - - Raises: - AttributeError: if attribute `name` doesn't exist. - """ - return getattr(self._subject, name) - - def get(self): - """Retrieve the entire wrapped object""" - return self._subject - - def __eq__(self, other): - if issubclass(other.__class__, self._subject.__class__): - return self._subject == other - return self.__dict__ == other.__dict__ - - -class Delegate: - """A Python Descriptor class that delegates attribute fetches - to an underlying wrapped subject of a Proxy. Typical use:: - - class Foo(Proxy): - __str__ = Delegate('__str__') - """ - def __init__(self, attribute): - self.attribute = attribute - - def __get__(self, obj, cls): - if isinstance(obj, cls): - return getattr(obj._subject, self.attribute) - - return self - - -class MethodWrapper: - """A generic Wrapper class that associates a method with an object. - - As part of creating this MethodWrapper object an attribute with the - specified name (by default, the name of the supplied method) is added - to the underlying object. When that new "method" is called, our - :meth:`__call__` method adds the object as the first argument, simulating - the Python behavior of supplying "self" on method calls. - - We hang on to the name by which the method was added to the underlying - base class so that we can provide a method to "clone" ourselves onto - a new underlying object being copied (without which we wouldn't need - to save that info). 
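# Sketch of the MethodWrapper class whose docstring appears just above
# (its methods follow below): wrapping a plain function onto an instance
# makes it callable like a bound method.  _Thing and _describe are
# invented names.
class _Thing:
    pass

def _describe(self, prefix):
    return f"{prefix}:{self.__class__.__name__}"

_t = _Thing()
MethodWrapper(_t, _describe, name='describe')   # attaches _t.describe
assert _t.describe('x') == 'x:_Thing'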
- """ - def __init__(self, obj, method, name=None): - if name is None: - name = method.__name__ - self.object = obj - self.method = method - self.name = name - setattr(self.object, name, self) - - def __call__(self, *args, **kwargs): - nargs = (self.object,) + args - return self.method(*nargs, **kwargs) - - def clone(self, new_object): - """ - Returns an object that re-binds the underlying "method" to - the specified new object. - """ - return self.__class__(new_object, self.method, self.name) - - -# attempt to load the windows registry module: -can_read_reg = False -try: - import winreg - - can_read_reg = True - hkey_mod = winreg - -except ImportError: - class _NoError(Exception): - pass - RegError = _NoError - -if can_read_reg: - HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT - HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE - HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER - HKEY_USERS = hkey_mod.HKEY_USERS - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegQueryValueEx = winreg.QueryValueEx - RegError = winreg.error - - def RegGetValue(root, key): - r"""Returns a registry value without having to open the key first. - - Only available on Windows platforms with a version of Python that - can read the registry. - - Returns the same thing as :func:`RegQueryValueEx`, except you just - specify the entire path to the value, and don't have to bother - opening the key first. So, instead of:: - - k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows\CurrentVersion') - out = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') - - You can write:: - - out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir') - """ - # I would use os.path.split here, but it's not a filesystem - # path... 
- p = key.rfind('\\') + 1 - keyp = key[: p - 1] # -1 to omit trailing slash - val = key[p:] - k = RegOpenKeyEx(root, keyp) - return RegQueryValueEx(k, val) - - -else: - HKEY_CLASSES_ROOT = None - HKEY_LOCAL_MACHINE = None - HKEY_CURRENT_USER = None - HKEY_USERS = None - - def RegGetValue(root, key): - raise OSError - - def RegOpenKeyEx(root, key): - raise OSError - - -if sys.platform == 'win32': - - def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if is_String(path): - path = path.split(os.pathsep) - if pathext is None: - try: - pathext = os.environ['PATHEXT'] - except KeyError: - pathext = '.COM;.EXE;.BAT;.CMD' - if is_String(pathext): - pathext = pathext.split(os.pathsep) - for ext in pathext: - if ext.lower() == file[-len(ext):].lower(): - pathext = [''] - break - if reject is None: - reject = [] - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for p in path: - f = os.path.join(p, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - try: - reject.index(fext) - except ValueError: - return os.path.normpath(fext) - continue - return None - -elif os.name == 'os2': - - def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if is_String(path): - path = path.split(os.pathsep) - if pathext is None: - pathext = ['.exe', '.cmd'] - for ext in pathext: - if ext.lower() == file[-len(ext):].lower(): - pathext = [''] - break - if reject is None: - reject = [] - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for p in path: - f = os.path.join(p, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - try: - reject.index(fext) - except ValueError: - return os.path.normpath(fext) - continue - return None - -else: - - def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]: - import stat # pylint: disable=import-outside-toplevel - - if path is None: - try: - path = os.environ['PATH'] - except KeyError: - return None - if is_String(path): - path = path.split(os.pathsep) - if reject is None: - reject = [] - if not is_List(reject) and not is_Tuple(reject): - reject = [reject] - for p in path: - f = os.path.join(p, file) - if os.path.isfile(f): - try: - st = os.stat(f) - except OSError: - # os.stat() raises OSError, not IOError if the file - # doesn't exist, so in this case we let IOError get - # raised so as to not mask possibly serious disk or - # network issues. - continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0o111: - try: - reject.index(f) - except ValueError: - return os.path.normpath(f) - continue - return None - -WhereIs.__doc__ = """\ -Return the path to an executable that matches `file`. - -Searches the given `path` for `file`, respecting any filename -extensions `pathext` (on the Windows platform only), and -returns the full path to the matching command. If no -command is found, return ``None``. - -If `path` is not specified, :attr:`os.environ[PATH]` is used. -If `pathext` is not specified, :attr:`os.environ[PATHEXT]` -is used. Will not select any path name or names in the optional -`reject` list. -""" - -def PrependPath( - oldpath, newpath, sep=os.pathsep, delete_existing=True, canonicalize=None -) -> Union[list, str]: - """Prepends `newpath` path elements to `oldpath`. 
- - Will only add any particular path once (leaving the first one it - encounters and ignoring the rest, to preserve path order), and will - :mod:`os.path.normpath` and :mod:`os.path.normcase` all paths to help - assure this. This can also handle the case where `oldpath` - is a list instead of a string, in which case a list will be returned - instead of a string. For example: - - >>> p = PrependPath("/foo/bar:/foo", "/biz/boom:/foo") - >>> print(p) - /biz/boom:/foo:/foo/bar - - If `delete_existing` is ``False``, then adding a path that exists will - not move it to the beginning; it will stay where it is in the list. - - >>> p = PrependPath("/foo/bar:/foo", "/biz/boom:/foo", delete_existing=False) - >>> print(p) - /biz/boom:/foo/bar:/foo - - If `canonicalize` is not ``None``, it is applied to each element of - `newpath` before use. - """ - - orig = oldpath - is_list = True - paths = orig - if not is_List(orig) and not is_Tuple(orig): - paths = paths.split(sep) - is_list = False - - if is_String(newpath): - newpaths = newpath.split(sep) - elif not is_List(newpath) and not is_Tuple(newpath): - newpaths = [ newpath ] # might be a Dir - else: - newpaths = newpath - - if canonicalize: - newpaths=list(map(canonicalize, newpaths)) - - if not delete_existing: - # First uniquify the old paths, making sure to - # preserve the first instance (in Unix/Linux, - # the first one wins), and remembering them in normpaths. - # Then insert the new paths at the head of the list - # if they're not already in the normpaths list. - result = [] - normpaths = [] - for path in paths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.append(path) - normpaths.append(normpath) - newpaths.reverse() # since we're inserting at the head - for path in newpaths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.insert(0, path) - normpaths.append(normpath) - paths = result - - else: - newpaths = newpaths + paths # prepend new paths - - normpaths = [] - paths = [] - # now we add them only if they are unique - for path in newpaths: - normpath = os.path.normpath(os.path.normcase(path)) - if path and normpath not in normpaths: - paths.append(path) - normpaths.append(normpath) - - if is_list: - return paths - - return sep.join(paths) - -def AppendPath( - oldpath, newpath, sep=os.pathsep, delete_existing=True, canonicalize=None -) -> Union[list, str]: - """Appends `newpath` path elements to `oldpath`. - - Will only add any particular path once (leaving the last one it - encounters and ignoring the rest, to preserve path order), and will - :mod:`os.path.normpath` and :mod:`os.path.normcase` all paths to help - assure this. This can also handle the case where `oldpath` - is a list instead of a string, in which case a list will be returned - instead of a string. For example: - - >>> p = AppendPath("/foo/bar:/foo", "/biz/boom:/foo") - >>> print(p) - /foo/bar:/biz/boom:/foo - - If `delete_existing` is ``False``, then adding a path that exists - will not move it to the end; it will stay where it is in the list. - - >>> p = AppendPath("/foo/bar:/foo", "/biz/boom:/foo", delete_existing=False) - >>> print(p) - /foo/bar:/foo:/biz/boom - - If `canonicalize` is not ``None``, it is applied to each element of - `newpath` before use. 
- """ - - orig = oldpath - is_list = True - paths = orig - if not is_List(orig) and not is_Tuple(orig): - paths = paths.split(sep) - is_list = False - - if is_String(newpath): - newpaths = newpath.split(sep) - elif not is_List(newpath) and not is_Tuple(newpath): - newpaths = [newpath] # might be a Dir - else: - newpaths = newpath - - if canonicalize: - newpaths=list(map(canonicalize, newpaths)) - - if not delete_existing: - # add old paths to result, then - # add new paths if not already present - # (I thought about using a dict for normpaths for speed, - # but it's not clear hashing the strings would be faster - # than linear searching these typically short lists.) - result = [] - normpaths = [] - for path in paths: - if not path: - continue - result.append(path) - normpaths.append(os.path.normpath(os.path.normcase(path))) - for path in newpaths: - if not path: - continue - normpath = os.path.normpath(os.path.normcase(path)) - if normpath not in normpaths: - result.append(path) - normpaths.append(normpath) - paths = result - else: - # start w/ new paths, add old ones if not present, - # then reverse. - newpaths = paths + newpaths # append new paths - newpaths.reverse() - - normpaths = [] - paths = [] - # now we add them only if they are unique - for path in newpaths: - normpath = os.path.normpath(os.path.normcase(path)) - if path and normpath not in normpaths: - paths.append(path) - normpaths.append(normpath) - paths.reverse() - - if is_list: - return paths - - return sep.join(paths) - -def AddPathIfNotExists(env_dict, key, path, sep=os.pathsep): - """Add a path element to a construction variable. - - `key` is looked up in `env_dict`, and `path` is added to it if it - is not already present. `env_dict[key]` is assumed to be in the - format of a PATH variable: a list of paths separated by `sep` tokens. - Example: - - >>> env = {'PATH': '/bin:/usr/bin:/usr/local/bin'} - >>> AddPathIfNotExists(env, 'PATH', '/opt/bin') - >>> print(env['PATH']) - /opt/bin:/bin:/usr/bin:/usr/local/bin - """ - - try: - is_list = True - paths = env_dict[key] - if not is_List(env_dict[key]): - paths = paths.split(sep) - is_list = False - if os.path.normcase(path) not in list(map(os.path.normcase, paths)): - paths = [ path ] + paths - if is_list: - env_dict[key] = paths - else: - env_dict[key] = sep.join(paths) - except KeyError: - env_dict[key] = path - -if sys.platform == 'cygwin': - import subprocess # pylint: disable=import-outside-toplevel - - def get_native_path(path) -> str: - cp = subprocess.run(('cygpath', '-w', path), check=False, stdout=subprocess.PIPE) - return cp.stdout.decode().replace('\n', '') -else: - def get_native_path(path) -> str: - return path - -get_native_path.__doc__ = """\ -Transform an absolute path into a native path for the system. - -In Cygwin, this converts from a Cygwin path to a Windows path, -without regard to whether `path` refers to an existing file -system object. For other platforms, `path` is unchanged. -""" - - -display = DisplayEngine() - -def Split(arg) -> list: - """Returns a list of file names or other objects. - - If `arg` is a string, it will be split on strings of white-space - characters within the string. If `arg` is already a list, the list - will be returned untouched. If `arg` is any other type of object, - it will be returned as a list containing just the object. 
- - >>> print(Split(" this is a string ")) - ['this', 'is', 'a', 'string'] - >>> print(Split(["stringlist", " preserving ", " spaces "])) - ['stringlist', ' preserving ', ' spaces '] - """ - if is_List(arg) or is_Tuple(arg): - return arg - - if is_String(arg): - return arg.split() - - return [arg] - - -class CLVar(UserList): - """A container for command-line construction variables. - - Forces the use of a list of strings intended as command-line - arguments. Like :class:`collections.UserList`, but the argument - passed to the initializter will be processed by the :func:`Split` - function, which includes special handling for string types: they - will be split into a list of words, not coereced directly to a list. - The same happens if a string is added to a :class:`CLVar`, - which allows doing the right thing with both - :func:`Append`/:func:`Prepend` methods, - as well as with pure Python addition, regardless of whether adding - a list or a string to a construction variable. - - Side effect: spaces will be stripped from individual string - arguments. If you need spaces preserved, pass strings containing - spaces inside a list argument. - - >>> u = UserList("--some --opts and args") - >>> print(len(u), repr(u)) - 22 ['-', '-', 's', 'o', 'm', 'e', ' ', '-', '-', 'o', 'p', 't', 's', ' ', 'a', 'n', 'd', ' ', 'a', 'r', 'g', 's'] - >>> c = CLVar("--some --opts and args") - >>> print(len(c), repr(c)) - 4 ['--some', '--opts', 'and', 'args'] - >>> c += " strips spaces " - >>> print(len(c), repr(c)) - 6 ['--some', '--opts', 'and', 'args', 'strips', 'spaces'] - """ - - def __init__(self, initlist=None): - super().__init__(Split(initlist if initlist is not None else [])) - - def __add__(self, other): - return super().__add__(CLVar(other)) - - def __radd__(self, other): - return super().__radd__(CLVar(other)) - - def __iadd__(self, other): - return super().__iadd__(CLVar(other)) - - def __str__(self): - # Some cases the data can contain Nodes, so make sure they - # processed to string before handing them over to join. - return ' '.join([str(d) for d in self.data]) - - -class Selector(OrderedDict): - """A callable ordered dictionary that maps file suffixes to - dictionary values. We preserve the order in which items are added - so that :func:`get_suffix` calls always return the first suffix added. - """ - def __call__(self, env, source, ext=None): - if ext is None: - try: - ext = source[0].get_suffix() - except IndexError: - ext = "" - try: - return self[ext] - except KeyError: - # Try to perform Environment substitution on the keys of - # the dictionary before giving up. - s_dict = {} - for (k,v) in self.items(): - if k is not None: - s_k = env.subst(k) - if s_k in s_dict: - # We only raise an error when variables point - # to the same suffix. If one suffix is literal - # and a variable suffix contains this literal, - # the literal wins and we don't raise an error. - raise KeyError(s_dict[s_k][0], k, s_k) - s_dict[s_k] = (k,v) - try: - return s_dict[ext][1] - except KeyError: - try: - return self[None] - except KeyError: - return None - - -if sys.platform == 'cygwin': - # On Cygwin, os.path.normcase() lies, so just report back the - # fact that the underlying Windows OS is case-insensitive. 
- def case_sensitive_suffixes(s1, s2) -> bool: # pylint: disable=unused-argument - return False - -else: - def case_sensitive_suffixes(s1, s2) -> bool: - return os.path.normcase(s1) != os.path.normcase(s2) - - -def adjustixes(fname, pre, suf, ensure_suffix=False) -> str: - """Adjust filename prefixes and suffixes as needed. - - Add `prefix` to `fname` if specified. - Add `suffix` to `fname` if specified and if `ensure_suffix` is ``True`` - """ - - if pre: - path, fn = os.path.split(os.path.normpath(fname)) - - # Handle the odd case where the filename = the prefix. - # In that case, we still want to add the prefix to the file - if not fn.startswith(pre) or fn == pre: - fname = os.path.join(path, pre + fn) - # Only append a suffix if the suffix we're going to add isn't already - # there, and if either we've been asked to ensure the specific suffix - # is present or there's no suffix on it at all. - # Also handle the odd case where the filename = the suffix. - # in that case we still want to append the suffix - if suf and not fname.endswith(suf) and \ - (ensure_suffix or not splitext(fname)[1]): - fname = fname + suf - return fname - - - -# From Tim Peters, -# https://code.activestate.com/recipes/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# (Also in the printed Python Cookbook.) -# Updated. This algorithm is used by some scanners and tools. - -def unique(seq): - """Return a list of the elements in seq without duplicates, ignoring order. - - >>> mylist = unique([1, 2, 3, 1, 2, 3]) - >>> print(sorted(mylist)) - [1, 2, 3] - >>> mylist = unique("abcabc") - >>> print(sorted(mylist)) - ['a', 'b', 'c'] - >>> mylist = unique(([1, 2], [2, 3], [1, 2])) - >>> print(sorted(mylist)) - [[1, 2], [2, 3]] - - For best speed, all sequence elements should be hashable. Then - unique() will usually work in linear time. - - If not possible, the sequence elements should enjoy a total - ordering, and if list(s).sort() doesn't raise TypeError it's - assumed that they do enjoy a total ordering. Then unique() will - usually work in O(N*log2(N)) time. - - If that's not possible either, the sequence elements must support - equality-testing. Then unique() will usually work in quadratic time. - """ - - if not seq: - return [] - - # Try using a dict first, as that's the fastest and will usually - # work. If it doesn't work, it will usually fail quickly, so it - # usually doesn't cost much to *try* it. It requires that all the - # sequence elements be hashable, and support equality comparison. - # TODO: should be even faster: return(list(set(seq))) - with suppress(TypeError): - return list(dict.fromkeys(seq)) - - # We couldn't hash all the elements (got a TypeError). - # Next fastest is to sort, which brings the equal elements together; - # then duplicates are easy to weed out in a single pass. - # NOTE: Python's list.sort() was designed to be efficient in the - # presence of many duplicate elements. This isn't true of all - # sort functions in all languages or libraries, so this approach - # is more effective in Python than it may be elsewhere. - n = len(seq) - try: - t = sorted(seq) - except TypeError: - pass # move on to the next method - else: - last = t[0] - lasti = i = 1 - while i < n: - if t[i] != last: - t[lasti] = last = t[i] - lasti = lasti + 1 - i = i + 1 - return t[:lasti] - - # Brute force is all that's left. 
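As a further illustration of adjustixes() defined above (a sketch assuming it is imported from SCons.Util, as SCons's own builders do):

    from SCons.Util import adjustixes

    print(adjustixes('hello', 'lib', '.so'))                         # -> libhello.so
    print(adjustixes('libhello', 'lib', '.so'))                      # -> libhello.so (prefix already present)
    print(adjustixes('hello.c', 'lib', '.so'))                       # -> libhello.c  (existing suffix kept)
    print(adjustixes('hello.c', 'lib', '.so', ensure_suffix=True))   # -> libhello.c.so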
- u = [] - for x in seq: - if x not in u: - u.append(x) - return u - - -# From Alex Martelli, -# https://code.activestate.com/recipes/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) -# This not currently used, in favor of the next function... - -def uniquer(seq, idfun=None): - def default_idfun(x): - return x - if not idfun: - idfun = default_idfun - seen = {} - result = [] - result_append = result.append # perf: avoid repeated method lookups - for item in seq: - marker = idfun(item) - if marker in seen: - continue - seen[marker] = 1 - result_append(item) - return result - -# A more efficient implementation of Alex's uniquer(), this avoids the -# idfun() argument and function-call overhead by assuming that all -# items in the sequence are hashable. Order-preserving. - -def uniquer_hashables(seq): - seen = {} - result = [] - result_append = result.append # perf: avoid repeated method lookups - for item in seq: - if item not in seen: - seen[item] = 1 - result_append(item) - return result - - -# Recipe 19.11 "Reading Lines with Continuation Characters", -# by Alex Martelli, straight from the Python CookBook (2nd edition). -def logical_lines(physical_lines, joiner=''.join): - logical_line = [] - for line in physical_lines: - stripped = line.rstrip() - if stripped.endswith('\\'): - # a line which continues w/the next physical line - logical_line.append(stripped[:-1]) - else: - # a line which does not continue, end of logical line - logical_line.append(line) - yield joiner(logical_line) - logical_line = [] - if logical_line: - # end of sequence implies end of last logical line - yield joiner(logical_line) - - -class LogicalLines: - """ Wrapper class for the logical_lines method. - - Allows us to read all "logical" lines at once from a given file object. - """ - - def __init__(self, fileobj): - self.fileobj = fileobj - - def readlines(self): - return list(logical_lines(self.fileobj)) - - -class UniqueList(UserList): - """A list which maintains uniqueness. - - Uniquing is lazy: rather than being assured on list changes, it is fixed - up on access by those methods which need to act on a uniqe list to be - correct. That means things like "in" don't have to eat the uniquing time. 
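A sketch of feeding the continuation-line recipe above a file-like object, assuming logical_lines remains importable from SCons.Util where it is defined here:

    import io
    from SCons.Util import logical_lines

    text = "CFLAGS = -O2 \\\n    -Wall\nLIBS = -lm\n"
    for line in logical_lines(io.StringIO(text)):
        print(repr(line))
    # The two physical CFLAGS lines come out as a single logical line;
    # 'LIBS = -lm\n' is yielded unchanged.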
- """ - def __init__(self, initlist=None): - super().__init__(initlist) - self.unique = True - - def __make_unique(self): - if not self.unique: - self.data = uniquer_hashables(self.data) - self.unique = True - - def __repr__(self): - self.__make_unique() - return super().__repr__() - - def __lt__(self, other): - self.__make_unique() - return super().__lt__(other) - - def __le__(self, other): - self.__make_unique() - return super().__le__(other) - - def __eq__(self, other): - self.__make_unique() - return super().__eq__(other) - - def __ne__(self, other): - self.__make_unique() - return super().__ne__(other) - - def __gt__(self, other): - self.__make_unique() - return super().__gt__(other) - - def __ge__(self, other): - self.__make_unique() - return super().__ge__(other) - - # __contains__ doesn't need to worry about uniquing, inherit - - def __len__(self): - self.__make_unique() - return super().__len__() - - def __getitem__(self, i): - self.__make_unique() - return super().__getitem__(i) - - def __setitem__(self, i, item): - super().__setitem__(i, item) - self.unique = False - - # __delitem__ doesn't need to worry about uniquing, inherit - - def __add__(self, other): - result = super().__add__(other) - result.unique = False - return result - - def __radd__(self, other): - result = super().__radd__(other) - result.unique = False - return result - - def __iadd__(self, other): - result = super().__iadd__(other) - result.unique = False - return result - - def __mul__(self, other): - result = super().__mul__(other) - result.unique = False - return result - - def __rmul__(self, other): - result = super().__rmul__(other) - result.unique = False - return result - - def __imul__(self, other): - result = super().__imul__(other) - result.unique = False - return result - - def append(self, item): - super().append(item) - self.unique = False - - def insert(self, i, item): - super().insert(i, item) - self.unique = False - - def count(self, item): - self.__make_unique() - return super().count(item) - - def index(self, item, *args): - self.__make_unique() - return super().index(item, *args) - - def reverse(self): - self.__make_unique() - super().reverse() - - # TODO: Py3.8: def sort(self, /, *args, **kwds): - def sort(self, *args, **kwds): - self.__make_unique() - return super().sort(*args, **kwds) - - def extend(self, other): - super().extend(other) - self.unique = False - - -class Unbuffered: - """A proxy that wraps a file object, flushing after every write. - - Delegates everything else to the wrapped object. - """ - def __init__(self, file): - self.file = file - - def write(self, arg): - # Stdout might be connected to a pipe that has been closed - # by now. The most likely reason for the pipe being closed - # is that the user has press ctrl-c. It this is the case, - # then SCons is currently shutdown. We therefore ignore - # IOError's here so that SCons can continue and shutdown - # properly so that the .sconsign is correctly written - # before SCons exits. 
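A sketch of the lazy uniquing described for UniqueList above, assuming the class is importable from SCons.Util:

    from SCons.Util import UniqueList

    ul = UniqueList(['a', 'b'])
    ul.append('a')            # duplicate stored, nothing cleaned up yet
    ul.extend(['c', 'b'])     # still lazy
    print(len(ul), repr(ul))  # -> 3 ['a', 'b', 'c']   (uniqued on access)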
- with suppress(IOError): - self.file.write(arg) - self.file.flush() - - def writelines(self, arg): - with suppress(IOError): - self.file.writelines(arg) - self.file.flush() - - def __getattr__(self, attr): - return getattr(self.file, attr) - -def make_path_relative(path) -> str: - """Converts an absolute path name to a relative pathname.""" - - if os.path.isabs(path): - drive_s, path = os.path.splitdrive(path) - - if not drive_s: - path=re.compile(r"/*(.*)").findall(path)[0] - else: - path=path[1:] - - assert not os.path.isabs(path), path - return path - - -# The original idea for AddMethod() came from the -# following post to the ActiveState Python Cookbook: -# -# ASPN: Python Cookbook : Install bound methods in an instance -# https://code.activestate.com/recipes/223613 -# -# Changed as follows: -# * Switched the installmethod() "object" and "function" arguments, -# so the order reflects that the left-hand side is the thing being -# "assigned to" and the right-hand side is the value being assigned. -# * The instance/class detection is changed a bit, as it's all -# new-style classes now with Py3. -# * The by-hand construction of the function object from renamefunction() -# is not needed, the remaining bit is now used inline in AddMethod. - -def AddMethod(obj, function, name=None): - """Adds a method to an object. - - Adds `function` to `obj` if `obj` is a class object. - Adds `function` as a bound method if `obj` is an instance object. - If `obj` looks like an environment instance, use `MethodWrapper` - to add it. If `name` is supplied it is used as the name of `function`. - - Although this works for any class object, the intent as a public - API is to be used on Environment, to be able to add a method to all - construction environments; it is preferred to use env.AddMethod - to add to an individual environment. - - >>> class A: - ... ... - - >>> a = A() - - >>> def f(self, x, y): - ... self.z = x + y - - >>> AddMethod(A, f, "add") - >>> a.add(2, 4) - >>> print(a.z) - 6 - >>> a.data = ['a', 'b', 'c', 'd', 'e', 'f'] - >>> AddMethod(a, lambda self, i: self.data[i], "listIndex") - >>> print(a.listIndex(3)) - d - - """ - if name is None: - name = function.__name__ - else: - # "rename" - function = FunctionType( - function.__code__, function.__globals__, name, function.__defaults__ - ) - - if hasattr(obj, '__class__') and obj.__class__ is not type: - # obj is an instance, so it gets a bound method. - if hasattr(obj, "added_methods"): - method = MethodWrapper(obj, function, name) - obj.added_methods.append(method) - else: - method = MethodType(function, obj) - else: - # obj is a class - method = function - - setattr(obj, name, method) - - -# Default hash function and format. SCons-internal. -DEFAULT_HASH_FORMATS = ['md5', 'sha1', 'sha256'] -ALLOWED_HASH_FORMATS = [] -_HASH_FUNCTION = None -_HASH_FORMAT = None - -def _attempt_init_of_python_3_9_hash_object(hash_function_object, sys_used=sys): - """Python 3.9 and onwards lets us initialize the hash function object with the - key "usedforsecurity"=false. This lets us continue to use algorithms that have - been deprecated either by FIPS or by Python itself, as the MD5 algorithm SCons - prefers is not being used for security purposes as much as a short, 32 char - hash that is resistant to accidental collisions. - - In prior versions of python, hashlib returns a native function wrapper, which - errors out when it's queried for the optional parameter, so this function - wraps that call. 
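The pattern being wrapped here, shown standalone with the standard hashlib API (not SCons-specific):

    import sys
    import hashlib

    if sys.version_info >= (3, 9):
        # 3.9+ accepts the flag, so MD5 stays usable on FIPS-enabled hosts
        # when it is only used for change detection, not for security.
        md5 = hashlib.md5(usedforsecurity=False)
    else:
        # Older interpreters reject the keyword; on a FIPS-enabled system
        # this call itself may raise ValueError.
        md5 = hashlib.md5()

    md5.update(b"some file contents")
    print(md5.hexdigest())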
- - It can still throw a ValueError if the initialization fails due to FIPS - compliance issues, but that is assumed to be the responsibility of the caller. - """ - if hash_function_object is None: - return None - - # https://stackoverflow.com/a/11887885 details how to check versions with the "packaging" library. - # however, for our purposes checking the version is greater than or equal to 3.9 is good enough, as - # the API is guaranteed to have support for the 'usedforsecurity' flag in 3.9. See - # https://docs.python.org/3/library/hashlib.html#:~:text=usedforsecurity for the version support notes. - if (sys_used.version_info.major > 3) or (sys_used.version_info.major == 3 and sys_used.version_info.minor >= 9): - return hash_function_object(usedforsecurity=False) - - # note that this can throw a ValueError in FIPS-enabled versions of Linux prior to 3.9 - # the OpenSSL hashlib will throw on first init here, but that is assumed to be responsibility of - # the caller to diagnose the ValueError & potentially display the error to screen. - return hash_function_object() - -def _set_allowed_viable_default_hashes(hashlib_used, sys_used=sys): - """Checks if SCons has ability to call the default algorithms normally supported. - - This util class is sometimes called prior to setting the user-selected hash algorithm, - meaning that on FIPS-compliant systems the library would default-initialize MD5 - and throw an exception in set_hash_format. A common case is using the SConf options, - which can run prior to main, and thus ignore the options.hash_format variable. - - This function checks the DEFAULT_HASH_FORMATS and sets the ALLOWED_HASH_FORMATS - to only the ones that can be called. In Python >= 3.9 this will always default to - MD5 as in Python 3.9 there is an optional attribute "usedforsecurity" set for the method. - - Throws if no allowed hash formats are detected. - """ - global ALLOWED_HASH_FORMATS - _last_error = None - # note: if you call this method repeatedly, example using timeout, this is needed. - # otherwise it keeps appending valid formats to the string - ALLOWED_HASH_FORMATS = [] - - for test_algorithm in DEFAULT_HASH_FORMATS: - _test_hash = getattr(hashlib_used, test_algorithm, None) - # we know hashlib claims to support it... check to see if we can call it. - if _test_hash is not None: - # the hashing library will throw an exception on initialization in FIPS mode, - # meaning if we call the default algorithm returned with no parameters, it'll - # throw if it's a bad algorithm, otherwise it will append it to the known - # good formats. - try: - _attempt_init_of_python_3_9_hash_object(_test_hash, sys_used) - ALLOWED_HASH_FORMATS.append(test_algorithm) - except ValueError as e: - _last_error = e - continue - - if len(ALLOWED_HASH_FORMATS) == 0: - from SCons.Errors import SConsEnvironmentError # pylint: disable=import-outside-toplevel - # chain the exception thrown with the most recent error from hashlib. - raise SConsEnvironmentError( - 'No usable hash algorithms found.' - 'Most recent error from hashlib attached in trace.' - ) from _last_error - return - -_set_allowed_viable_default_hashes(hashlib) - - -def get_hash_format(): - """Retrieves the hash format or ``None`` if not overridden. - - A return value of ``None`` - does not guarantee that MD5 is being used; instead, it means that the - default precedence order documented in :func:`SCons.Util.set_hash_format` - is respected. 
- """ - return _HASH_FORMAT - -def _attempt_get_hash_function(hash_name, hashlib_used=hashlib, sys_used=sys): - """Wrapper used to try to initialize a hash function given. - - If successful, returns the name of the hash function back to the user. - - Otherwise returns None. - """ - try: - _fetch_hash = getattr(hashlib_used, hash_name, None) - if _fetch_hash is None: - return None - _attempt_init_of_python_3_9_hash_object(_fetch_hash, sys_used) - return hash_name - except ValueError: - # if attempt_init_of_python_3_9 throws, this is typically due to FIPS being enabled - # however, if we get to this point, the viable hash function check has either been - # bypassed or otherwise failed to properly restrict the user to only the supported - # functions. As such throw the UserError as an internal assertion-like error. - return None - -def set_hash_format(hash_format, hashlib_used=hashlib, sys_used=sys): - """Sets the default hash format used by SCons. - - If `hash_format` is ``None`` or - an empty string, the default is determined by this function. - - Currently the default behavior is to use the first available format of - the following options: MD5, SHA1, SHA256. - """ - global _HASH_FORMAT, _HASH_FUNCTION - - _HASH_FORMAT = hash_format - if hash_format: - hash_format_lower = hash_format.lower() - if hash_format_lower not in ALLOWED_HASH_FORMATS: - from SCons.Errors import UserError # pylint: disable=import-outside-toplevel - - # user can select something not supported by their OS but normally supported by - # SCons, example, selecting MD5 in an OS with FIPS-mode turned on. Therefore we first - # check if SCons supports it, and then if their local OS supports it. - if hash_format_lower in DEFAULT_HASH_FORMATS: - raise UserError('While hash format "%s" is supported by SCons, the ' - 'local system indicates only the following hash ' - 'formats are supported by the hashlib library: %s' % - (hash_format_lower, - ', '.join(ALLOWED_HASH_FORMATS)) - ) - else: - # the hash format isn't supported by SCons in any case. Warn the user, and - # if we detect that SCons supports more algorithms than their local system - # supports, warn the user about that too. - if ALLOWED_HASH_FORMATS == DEFAULT_HASH_FORMATS: - raise UserError('Hash format "%s" is not supported by SCons. Only ' - 'the following hash formats are supported: %s' % - (hash_format_lower, - ', '.join(ALLOWED_HASH_FORMATS)) - ) - else: - raise UserError('Hash format "%s" is not supported by SCons. ' - 'SCons supports more hash formats than your local system ' - 'is reporting; SCons supports: %s. Your local system only ' - 'supports: %s' % - (hash_format_lower, - ', '.join(DEFAULT_HASH_FORMATS), - ', '.join(ALLOWED_HASH_FORMATS)) - ) - - # this is not expected to fail. If this fails it means the set_allowed_viable_default_hashes - # function did not throw, or when it threw, the exception was caught and ignored, or - # the global ALLOWED_HASH_FORMATS was changed by an external user. - _HASH_FUNCTION = _attempt_get_hash_function(hash_format_lower, hashlib_used, sys_used) - - if _HASH_FUNCTION is None: - from SCons.Errors import UserError # pylint: disable=import-outside-toplevel - - raise UserError( - 'Hash format "%s" is not available in your Python interpreter. ' - 'Expected to be supported algorithm by set_allowed_viable_default_hashes, ' - 'Assertion error in SCons.' 
- % hash_format_lower - ) - else: - # Set the default hash format based on what is available, defaulting - # to the first supported hash algorithm (usually md5) for backwards compatibility. - # in FIPS-compliant systems this usually defaults to SHA1, unless that too has been - # disabled. - for choice in ALLOWED_HASH_FORMATS: - _HASH_FUNCTION = _attempt_get_hash_function(choice, hashlib_used, sys_used) - - if _HASH_FUNCTION is not None: - break - else: - # This is not expected to happen in practice. - from SCons.Errors import UserError # pylint: disable=import-outside-toplevel - - raise UserError( - 'Your Python interpreter does not have MD5, SHA1, or SHA256. ' - 'SCons requires at least one. Expected to support one or more ' - 'during set_allowed_viable_default_hashes.' - ) - -# Ensure that this is initialized in case either: -# 1. This code is running in a unit test. -# 2. This code is running in a consumer that does hash operations while -# SConscript files are being loaded. -set_hash_format(None) - - -def get_current_hash_algorithm_used(): - """Returns the current hash algorithm name used. - - Where the python version >= 3.9, this is expected to return md5. - If python's version is <= 3.8, this returns md5 on non-FIPS-mode platforms, and - sha1 or sha256 on FIPS-mode Linux platforms. - - This function is primarily useful for testing, where one expects a value to be - one of N distinct hashes, and therefore the test needs to know which hash to select. - """ - return _HASH_FUNCTION - -def _get_hash_object(hash_format, hashlib_used=hashlib, sys_used=sys): - """Allocates a hash object using the requested hash format. - - Args: - hash_format: Hash format to use. - - Returns: - hashlib object. - """ - if hash_format is None: - if _HASH_FUNCTION is None: - from SCons.Errors import UserError # pylint: disable=import-outside-toplevel - - raise UserError('There is no default hash function. Did you call ' - 'a hashing function before SCons was initialized?') - return _attempt_init_of_python_3_9_hash_object(getattr(hashlib_used, _HASH_FUNCTION, None), sys_used) - - if not hasattr(hashlib, hash_format): - from SCons.Errors import UserError # pylint: disable=import-outside-toplevel - - raise UserError( - 'Hash format "%s" is not available in your Python interpreter.' % - hash_format) - - return _attempt_init_of_python_3_9_hash_object(getattr(hashlib, hash_format), sys_used) - - -def hash_signature(s, hash_format=None): - """ - Generate hash signature of a string - - Args: - s: either string or bytes. Normally should be bytes - hash_format: Specify to override default hash format - - Returns: - String of hex digits representing the signature - """ - m = _get_hash_object(hash_format) - try: - m.update(to_bytes(s)) - except TypeError: - m.update(to_bytes(str(s))) - - return m.hexdigest() - - -def hash_file_signature(fname, chunksize=65536, hash_format=None): - """ - Generate the md5 signature of a file - - Args: - fname: file to hash - chunksize: chunk size to read - hash_format: Specify to override default hash format - - Returns: - String of Hex digits representing the signature - """ - - m = _get_hash_object(hash_format) - with open(fname, "rb") as f: - while True: - blck = f.read(chunksize) - if not blck: - break - m.update(to_bytes(blck)) - return m.hexdigest() - - -def hash_collect(signatures, hash_format=None): - """ - Collects a list of signatures into an aggregate signature. 
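A usage sketch for the hashing helpers above, assuming they are imported from SCons.Util; the chosen format is validated against ALLOWED_HASH_FORMATS as described:

    import os
    import tempfile
    from SCons.Util import (
        set_hash_format,
        get_current_hash_algorithm_used,
        hash_signature,
        hash_file_signature,
        hash_collect,
    )

    set_hash_format('sha256')                  # or None to take the default order
    print(get_current_hash_algorithm_used())   # -> sha256

    sig1 = hash_signature(b'content one')
    sig2 = hash_signature('content two')       # str input is converted via to_bytes()

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'file body')
    print(hash_file_signature(f.name))         # hex digest of the file contents
    os.unlink(f.name)

    print(hash_collect([sig1, sig2]))          # aggregate signature of the two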
- - Args: - signatures: a list of signatures - hash_format: Specify to override default hash format - - Returns: - the aggregate signature - """ - - if len(signatures) == 1: - return signatures[0] - - return hash_signature(', '.join(signatures), hash_format) - - -_MD5_WARNING_SHOWN = False - -def _show_md5_warning(function_name): - """Shows a deprecation warning for various MD5 functions.""" - - global _MD5_WARNING_SHOWN - - if not _MD5_WARNING_SHOWN: - import SCons.Warnings # pylint: disable=import-outside-toplevel - - SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, - "Function %s is deprecated" % function_name) - _MD5_WARNING_SHOWN = True - - -def MD5signature(s): - """Deprecated. Use :func:`hash_signature` instead.""" - - _show_md5_warning("MD5signature") - return hash_signature(s) - - -def MD5filesignature(fname, chunksize=65536): - """Deprecated. Use :func:`hash_file_signature` instead.""" - - _show_md5_warning("MD5filesignature") - return hash_file_signature(fname, chunksize) - - -def MD5collect(signatures): - """Deprecated. Use :func:`hash_collect` instead.""" - - _show_md5_warning("MD5collect") - return hash_collect(signatures) - - -def silent_intern(x): - """ - Perform :mod:`sys.intern` on the passed argument and return the result. - If the input is ineligible for interning the original argument is - returned and no exception is thrown. - """ - try: - return sys.intern(x) - except TypeError: - return x - - -# From Dinu C. Gherman, -# Python Cookbook, second edition, recipe 6.17, p. 277. -# Also: https://code.activestate.com/recipes/68205 -# ASPN: Python Cookbook: Null Object Design Pattern - -class Null: - """ Null objects always and reliably "do nothing." """ - def __new__(cls, *args, **kwargs): - if '_instance' not in vars(cls): - cls._instance = super(Null, cls).__new__(cls, *args, **kwargs) - return cls._instance - def __init__(self, *args, **kwargs): - pass - def __call__(self, *args, **kwargs): - return self - def __repr__(self): - return "Null(0x%08X)" % id(self) - def __bool__(self): - return False - def __getattr__(self, name): - return self - def __setattr__(self, name, value): - return self - def __delattr__(self, name): - return self - - -class NullSeq(Null): - """ A Null object that can also be iterated over. """ - def __len__(self): - return 0 - def __iter__(self): - return iter(()) - def __getitem__(self, i): - return self - def __delitem__(self, i): - return self - def __setitem__(self, i, v): - return self - - -def to_bytes(s) -> bytes: - if s is None: - return b'None' - if isinstance(s, (bytes, bytearray)): - # if already bytes return. - return s - return bytes(s, 'utf-8') - - -def to_str(s) -> str: - if s is None: - return 'None' - if is_String(s): - return s - return str(s, 'utf-8') - - -def cmp(a, b) -> bool: - """A cmp function because one is no longer available in python3.""" - return (a > b) - (a < b) - - -def get_env_bool(env, name, default=False) -> bool: - """Convert a construction variable to bool. - - If the value of `name` in `env` is 'true', 'yes', 'y', 'on' (case - insensitive) or anything convertible to int that yields non-zero then - return ``True``; if 'false', 'no', 'n', 'off' (case insensitive) - or a number that converts to integer zero return ``False``. - Otherwise, return `default`. 
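A sketch of the conversion rules just described, using a plain dict in place of a construction environment and assuming get_env_bool is importable from SCons.Util:

    from SCons.Util import get_env_bool

    env = {'VERBOSE': 'yes', 'DEBUG': '0', 'JOBS': '4', 'COLOR': 'maybe'}
    print(get_env_bool(env, 'VERBOSE'))              # -> True   ('yes')
    print(get_env_bool(env, 'DEBUG'))                # -> False  (converts to int 0)
    print(get_env_bool(env, 'JOBS'))                 # -> True   (non-zero int)
    print(get_env_bool(env, 'COLOR', default=True))  # -> True   (unrecognized -> default)
    print(get_env_bool(env, 'MISSING'))              # -> False  (not set -> default)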
- - Args: - env: construction environment, or any dict-like object - name: name of the variable - default: value to return if `name` not in `env` or cannot - be converted (default: False) - - Returns: - the "truthiness" of `name` - """ - try: - var = env[name] - except KeyError: - return default - try: - return bool(int(var)) - except ValueError: - if str(var).lower() in ('true', 'yes', 'y', 'on'): - return True - - if str(var).lower() in ('false', 'no', 'n', 'off'): - return False - - return default - - -def get_os_env_bool(name, default=False) -> bool: - """Convert an environment variable to bool. - - Conversion is the same as for :func:`get_env_bool`. - """ - return get_env_bool(os.environ, name, default) - - -def print_time(): - """Hack to return a value from Main if can't import Main.""" - # pylint: disable=redefined-outer-name,import-outside-toplevel - from SCons.Script.Main import print_time - return print_time - - -def wait_for_process_to_die(pid): - """ - Wait for specified process to die, or alternatively kill it - NOTE: This function operates best with psutil pypi package - TODO: Add timeout which raises exception - """ - # wait for the process to fully killed - try: - import psutil - while True: - if pid not in [proc.pid for proc in psutil.process_iter()]: - break - else: - time.sleep(0.1) - except ImportError: - # if psutil is not installed we can do this the hard way - while True: - if sys.platform == 'win32': - import ctypes - PROCESS_QUERY_INFORMATION = 0x1000 - processHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION, 0,pid) - if processHandle == 0: - break - else: - ctypes.windll.kernel32.CloseHandle(processHandle) - time.sleep(0.1) - else: - try: - os.kill(pid, 0) - except OSError: - break - else: - time.sleep(0.1) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/SCons/UtilTests.py scons-4.5.2+dfsg/SCons/UtilTests.py --- scons-4.4.0+dfsg/SCons/UtilTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/UtilTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -46,10 +46,6 @@ Proxy, Selector, WhereIs, - _attempt_init_of_python_3_9_hash_object, - _attempt_get_hash_function, - _get_hash_object, - _set_allowed_viable_default_hashes, adjustixes, containsAll, containsAny, @@ -76,6 +72,12 @@ to_bytes, to_str, ) +from SCons.Util.hashes import ( + _attempt_init_of_python_3_9_hash_object, + _attempt_get_hash_function, + _get_hash_object, + _set_allowed_viable_default_hashes, +) # These Util classes have no unit tests. Some don't make sense to test? # DisplayEngine, Delegate, MethodWrapper, UniqueList, Unbuffered, Null, NullSeq diff -Nru scons-4.4.0+dfsg/SCons/Variables/BoolVariable.py scons-4.5.2+dfsg/SCons/Variables/BoolVariable.py --- scons-4.4.0+dfsg/SCons/Variables/BoolVariable.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Variables/BoolVariable.py 2023-03-21 16:17:04.000000000 +0000 @@ -26,10 +26,10 @@ Usage example:: opts = Variables() - opts.Add(BoolVariable('embedded', 'build for an embedded system', 0)) - ... - if env['embedded'] == 1: + opts.Add(BoolVariable('embedded', 'build for an embedded system', False)) ... + if env['embedded']: + ... """ from typing import Tuple, Callable @@ -42,17 +42,17 @@ FALSE_STRINGS = ('n', 'no', 'false', 'f', '0', 'off', 'none') -def _text2bool(val) -> bool: - """Converts strings to True/False. +def _text2bool(val: str) -> bool: + """Convert boolean-like string to boolean. 
If *val* looks like it expresses a bool-like value, based on - the :data:`TRUE_STRINGS` and :data:`FALSE_STRINGS` tuples, + the :const:`TRUE_STRINGS` and :const:`FALSE_STRINGS` tuples, return the appropriate value. This is usable as a converter function for SCons Variables. Raises: - ValueError: if the string cannot be converted. + ValueError: if *val* cannot be converted to boolean. """ lval = val.lower() @@ -64,13 +64,15 @@ def _validator(key, val, env) -> None: - """Validates the given value to be either true or false. + """Validate that the value of *key* in *env* is a boolean. + + Parmaeter *val* is not used in the check. - This is usable as a validator function for SCons Variables. + Usable as a validator function for SCons Variables. Raises: - KeyError: if key is not set in env - UserError: if key does not validate. + KeyError: if *key* is not set in *env* + UserError: if the value of *key* is not ``True`` or ``False``. """ if not env[key] in (True, False): raise SCons.Errors.UserError( diff -Nru scons-4.4.0+dfsg/SCons/Variables/BoolVariableTests.py scons-4.5.2+dfsg/SCons/Variables/BoolVariableTests.py --- scons-4.4.0+dfsg/SCons/Variables/BoolVariableTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Variables/BoolVariableTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -30,7 +30,7 @@ def test_BoolVariable(self): """Test BoolVariable creation""" opts = SCons.Variables.Variables() - opts.Add(SCons.Variables.BoolVariable('test', 'test option help', 0)) + opts.Add(SCons.Variables.BoolVariable('test', 'test option help', False)) o = opts.options[0] assert o.key == 'test', o.key @@ -42,7 +42,7 @@ def test_converter(self): """Test the BoolVariable converter""" opts = SCons.Variables.Variables() - opts.Add(SCons.Variables.BoolVariable('test', 'test option help', 0)) + opts.Add(SCons.Variables.BoolVariable('test', 'test option help', False)) o = opts.options[0] @@ -73,17 +73,17 @@ x = o.converter(f) assert not x, "converter returned true for '%s'" % f - caught = None + caught = False try: o.converter('x') except ValueError: - caught = 1 - assert caught, "did not catch expected ValueError" + caught = True + assert caught, "did not catch expected ValueError for 'x'" def test_validator(self): """Test the BoolVariable validator""" opts = SCons.Variables.Variables() - opts.Add(SCons.Variables.BoolVariable('test', 'test option help', 0)) + opts.Add(SCons.Variables.BoolVariable('test', 'test option help', False)) o = opts.options[0] @@ -93,23 +93,24 @@ 'N' : 'xyzzy', } + # positive checks o.validator('T', 0, env) - o.validator('F', 0, env) - caught = None + # negative checks + caught = False try: o.validator('N', 0, env) except SCons.Errors.UserError: - caught = 1 - assert caught, "did not catch expected UserError for N" + caught = True + assert caught, "did not catch expected UserError for value %s" % env['N'] - caught = None + caught = False try: o.validator('NOSUCHKEY', 0, env) except KeyError: - caught = 1 - assert caught, "did not catch expected KeyError for NOSUCHKEY" + caught = True + assert caught, "did not catch expected KeyError for 'NOSUCHKEY'" if __name__ == "__main__": diff -Nru scons-4.4.0+dfsg/SCons/Warnings.py scons-4.5.2+dfsg/SCons/Warnings.py --- scons-4.4.0+dfsg/SCons/Warnings.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SCons/Warnings.py 2023-03-21 16:17:04.000000000 +0000 @@ -46,6 +46,9 @@ class CacheWriteErrorWarning(SConsWarning): pass +class CacheCleanupErrorWarning(SConsWarning): + pass + class CorruptSConsignWarning(WarningOnByDefault): 
pass @@ -128,7 +131,7 @@ class DeprecatedMissingSConscriptWarning(DeprecatedWarning): pass -class ToolQtDeprecatedWarning(FutureDeprecatedWarning): +class ToolQtDeprecatedWarning(DeprecatedWarning): pass # The below is a list of 2-tuples. The first element is a class object. diff -Nru scons-4.4.0+dfsg/SConstruct scons-4.5.2+dfsg/SConstruct --- scons-4.4.0+dfsg/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -38,7 +38,7 @@ project = 'scons' -default_version = '4.4.0' +default_version = '4.5.2' copyright = "Copyright (c) %s The SCons Foundation" % copyright_years # diff -Nru scons-4.4.0+dfsg/scripts/scons.bat scons-4.5.2+dfsg/scripts/scons.bat --- scons-4.4.0+dfsg/scripts/scons.bat 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/scripts/scons.bat 2023-03-21 16:17:04.000000000 +0000 @@ -1,37 +1,37 @@ -@REM __COPYRIGHT__ -@echo off -set SCONS_ERRORLEVEL= -if "%OS%" == "Windows_NT" goto WinNT - -@REM for 9x/Me you better not have more than 9 args -python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-__VERSION__'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-__VERSION__'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %* -@REM no way to set exit status of this script for 9x/Me -goto endscons - -@REM Credit where credit is due: we return the exit code despite our -@REM use of setlocal+endlocal using a technique from Bear's Journal: -@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/ - -:WinNT -setlocal -@REM ensure the script will be executed with the Python it was installed for -pushd %~dp0.. -set path=%~dp0;%CD%;%path% -popd -@REM try the script named as the .bat file in current dir, then in Scripts subdir -set scriptname=%~dp0%~n0.py -if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py -@REM Handle when running from wheel where the script has no .py extension -if not exist "%scriptname%" set scriptname=%~dp0%~n0 -python "%scriptname%" %* -endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL% - -if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode -if errorlevel 9009 echo you do not have python in your PATH -goto endscons - -:returncode -exit /B %SCONS_ERRORLEVEL% - -:endscons -call :returncode %SCONS_ERRORLEVEL% +@REM __COPYRIGHT__ +@echo off +set SCONS_ERRORLEVEL= +if "%OS%" == "Windows_NT" goto WinNT + +@REM for 9x/Me you better not have more than 9 args +python -c "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-__VERSION__'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons-__VERSION__'), join(sys.prefix, 'scons')] + sys.path; import SCons.Script; SCons.Script.main()" %* +@REM no way to set exit status of this script for 9x/Me +goto endscons + +@REM Credit where credit is due: we return the exit code despite our +@REM use of setlocal+endlocal using a technique from Bear's Journal: +@REM http://code-bear.com/bearlog/2007/06/01/getting-the-exit-code-from-a-batch-file-that-is-run-from-a-python-program/ + +:WinNT +setlocal +@REM ensure the script will be executed with the Python it was installed for +pushd %~dp0.. 
+set path=%~dp0;%CD%;%path% +popd +@REM try the script named as the .bat file in current dir, then in Scripts subdir +set scriptname=%~dp0%~n0.py +if not exist "%scriptname%" set scriptname=%~dp0Scripts\%~n0.py +@REM Handle when running from wheel where the script has no .py extension +if not exist "%scriptname%" set scriptname=%~dp0%~n0 +python "%scriptname%" %* +endlocal & set SCONS_ERRORLEVEL=%ERRORLEVEL% + +if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto returncode +if errorlevel 9009 echo you do not have python in your PATH +goto endscons + +:returncode +exit /B %SCONS_ERRORLEVEL% + +:endscons +call :returncode %SCONS_ERRORLEVEL% diff -Nru scons-4.4.0+dfsg/setup.cfg scons-4.5.2+dfsg/setup.cfg --- scons-4.4.0+dfsg/setup.cfg 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/setup.cfg 2023-03-21 16:17:04.000000000 +0000 @@ -31,6 +31,8 @@ Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Environment :: Console Intended Audience :: Developers License :: OSI Approved :: MIT License diff -Nru scons-4.4.0+dfsg/site_scons/scons_local_package.py scons-4.5.2+dfsg/site_scons/scons_local_package.py --- scons-4.4.0+dfsg/site_scons/scons_local_package.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/site_scons/scons_local_package.py 2023-03-21 16:17:04.000000000 +0000 @@ -23,22 +23,23 @@ from glob import glob import os.path -from zip_utils import zipit +from zip_utils import zipit, zipappit from Utilities import is_windows def get_local_package_file_list(): - """ - Get list of all files which should be included in scons-local package - """ + """Get list of all files which should be included in scons-local package""" + s_files = glob("SCons/**", recursive=True) # import pdb; pdb.set_trace() non_test = [f for f in s_files if "Tests.py" not in f] - non_test_non_doc = [f for f in non_test if '.xml' not in f or "SCons/Tool/docbook" in f] + non_test_non_doc = [ + f for f in non_test if '.xml' not in f or "SCons/Tool/docbook" in f + ] filtered_list = [f for f in non_test_non_doc if 'pyc' not in f] - filtered_list = [f for f in filtered_list if '__pycache__' not in f ] + filtered_list = [f for f in filtered_list if '__pycache__' not in f] filtered_list = [f for f in filtered_list if not os.path.isdir(f)] return filtered_list @@ -50,47 +51,71 @@ files = get_local_package_file_list() target_dir = '#/build/scons-local/scons-local-$VERSION' for f in files: - all_local_installed.extend(env.Install(os.path.join(target_dir, os.path.dirname(f)), - f)) - - basedir_files = ['scripts/scons.bat', - 'scripts/scons.py', - 'scripts/scons-configure-cache.py', - 'scripts/sconsign.py', - 'bin/scons-time.py'] + all_local_installed.extend( + env.Install(os.path.join(target_dir, os.path.dirname(f)), f) + ) + + basedir_files = [ + 'scripts/scons.bat', + 'scripts/scons.py', + 'scripts/scons-configure-cache.py', + 'scripts/sconsign.py', + 'bin/scons-time.py', + ] for bf in basedir_files: fn = os.path.basename(bf) - all_local_installed.append(env.SCons_revision('#/build/scons-local/%s'%fn, bf)) + all_local_installed.append( + env.SCons_revision(f'#/build/scons-local/{fn}', bf) + ) # Now copy manpages into scons-local package - built_manpage_files = env.Glob('build/doc/man/*.1') + built_manpage_files = env.Glob('build/doc/man/*.1') for bmp in built_manpage_files: fn = os.path.basename(str(bmp)) - 
all_local_installed.append(env.SCons_revision('#/build/scons-local/%s'%fn, bmp)) - - rename_files = [('scons-${VERSION}.bat', 'scripts/scons.bat'), - ('scons-README', 'README-local'), - ('scons-LICENSE', 'LICENSE-local')] + all_local_installed.append( + env.SCons_revision(f'#/build/scons-local/{fn}', bmp) + ) + + rename_files = [ + ('scons-${VERSION}.bat', 'scripts/scons.bat'), + ('scons-README', 'README-local'), + ('scons-LICENSE', 'LICENSE-local'), + ] for t, f in rename_files: - target_file = "#/build/scons-local/%s"%t + target_file = f"#/build/scons-local/{t}" all_local_installed.append(env.SCons_revision(target_file, f)) return all_local_installed def create_local_packages(env): - # Add SubstFile builder - env.Tool('textfile') [env.Tool(x) for x in ['packaging', 'filesystem', 'zip']] installed_files = install_local_package_files(env) build_local_dir = 'build/scons-local' - package = env.Command('#build/dist/scons-local-${VERSION}.zip', - installed_files, - zipit, - CD=build_local_dir, - PSV='.', - ) + package = env.Command( + '#build/dist/scons-local-${VERSION}.zip', + installed_files, + zipit, + CD=build_local_dir, + PSV='.', + ) + + do_zipapp = True # Q: maybe an external way to specify whether to build? + if do_zipapp: + # We need to descend into the versioned directory for zipapp, + # but we don't know the version. env.Glob lets us expand that. + # The action isn't going to use the sources, but including + # them makes sure SCons has populated the dir we're going to zip. + app_dir = env.Glob(f"{build_local_dir}/scons-local-*")[0] + zipapp = env.Command( + target='#build/dist/scons-local-${VERSION}.pyz', + source=installed_files, + action=zipappit, + CD=app_dir, + PSV='.', + entry='SCons.Script.Main:main', + ) if is_windows(): # avoid problem with tar interpreting c:/ as a remote machine @@ -98,13 +123,12 @@ else: tar_cargs = '-czf' - env.Command('#build/dist/scons-local-${VERSION}.tar.gz', - installed_files, - "cd %s && tar %s $( ${TARGET.abspath} $) *" % (build_local_dir, tar_cargs)) - - print("Package:%s"%package) - - - - - + env.Command( + '#build/dist/scons-local-${VERSION}.tar.gz', + installed_files, + "cd %s && tar %s $( ${TARGET.abspath} $) *" % (build_local_dir, tar_cargs), + ) + + print(f"Package:{package}") + if do_zipapp: + print(f"Zipapp:{zipapp}") diff -Nru scons-4.4.0+dfsg/site_scons/zip_utils.py scons-4.5.2+dfsg/site_scons/zip_utils.py --- scons-4.4.0+dfsg/site_scons/zip_utils.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/site_scons/zip_utils.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,54 +1,101 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Actions to zip and unzip, for working with SCons release bundles +Action for creating a zipapp +""" + import os.path +import zipfile +import zipapp -zcat = 'gzip -d -c' +def zipit(env, target, source): + """Action function to zip *source* into *target* -# -# Figure out if we can handle .zip files. -# -zipit = None -unzipit = None -try: - import zipfile - - def zipit(env, target, source): - print("Zipping %s:" % str(target[0])) - def visit(arg, dirname, filenames): - for filename in filenames: - path = os.path.join(dirname, filename) - if os.path.isfile(path): - arg.write(path) - # default ZipFile compression is ZIP_STORED - zf = zipfile.ZipFile(str(target[0]), 'w', compression=zipfile.ZIP_DEFLATED) - olddir = os.getcwd() - os.chdir(env.Dir(env['CD']).abspath) - try: - for dirname, dirnames, filenames in os.walk(env['PSV']): - visit(zf, dirname, filenames) - finally: - os.chdir(olddir) - zf.close() - - def unzipit(env, target, source): - print("Unzipping %s:" % str(source[0])) - zf = zipfile.ZipFile(str(source[0]), 'r') - for name in zf.namelist(): - dest = os.path.join(env['UNPACK_ZIP_DIR'], name) - dir = os.path.dirname(dest) - try: - os.makedirs(dir) - except: - pass - print(dest,name) - # if the file exists, then delete it before writing - # to it so that we don't end up trying to write to a symlink: - if os.path.isfile(dest) or os.path.islink(dest): - os.unlink(dest) - if not os.path.isdir(dest): - with open(dest, 'wb') as fp: - fp.write(zf.read(name)) - -except ImportError: - if unzip and zip: - zipit = "cd $CD && $ZIP $ZIPFLAGS $( ${TARGET.abspath} $) $PSV" - unzipit = "$UNZIP $UNZIPFLAGS $SOURCES" + Values extracted from *env*: + *CD*: the directory to work in + *PSV*: the directory name to walk down to find the files + """ + print(f"Zipping {target[0]}:") + + def visit(arg, dirname, filenames): + for filename in filenames: + path = os.path.join(dirname, filename) + if os.path.isfile(path): + arg.write(path) + + # default ZipFile compression is ZIP_STORED + zf = zipfile.ZipFile(str(target[0]), 'w', compression=zipfile.ZIP_DEFLATED) + olddir = os.getcwd() + os.chdir(env.Dir(env['CD']).abspath) + try: + for dirname, dirnames, filenames in os.walk(env['PSV']): + visit(zf, dirname, filenames) + finally: + os.chdir(olddir) + zf.close() + + +def unzipit(env, target, source): + """Action function to unzip *source*""" + + print(f"Unzipping {source[0]}:") + zf = zipfile.ZipFile(str(source[0]), 'r') + for name in zf.namelist(): + dest = os.path.join(env['UNPACK_ZIP_DIR'], name) + dir = os.path.dirname(dest) + os.makedirs(dir, exist_ok=True) + print(dest, name) + # if the file exists, then delete it before writing + # to it so that we don't end up trying to write to a symlink: + if os.path.isfile(dest) or os.path.islink(dest): + os.unlink(dest) + if not os.path.isdir(dest): + with open(dest, 'wb') as fp: + fp.write(zf.read(name)) + + +def zipappit(env, target, source): + """Action function to Create a zipapp *target* from specified directory. + + Values extracted from *env*: + *CD*: the Dir node for the place we want to work. 
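What the zipappit action boils down to, as a standalone sketch using the stdlib zipapp module; the directory and output paths here are hypothetical placeholders:

    import zipapp

    # Bundle an unpacked scons-local tree (placeholder path) into a runnable .pyz.
    zipapp.create_archive(
        source="build/scons-local/scons-local-4.5.2",   # directory to bundle (placeholder)
        target="build/dist/scons-local-4.5.2.pyz",      # resulting archive (placeholder)
        main="SCons.Script.Main:main",                  # entry point, as passed via 'entry' above
        interpreter="/usr/bin/env python",              # shebang written into the archive
    )
    # The archive can then be executed directly:
    #     python build/dist/scons-local-4.5.2.pyz --version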
+ *PSV*: the directory name to point :meth:`zipapp.create_archive` at + (note *source* is unused here in favor of PSV) + *main*: the entry point for the zipapp + """ + print(f"Creating zipapp {target[0]}:") + dest = target[0].abspath + olddir = os.getcwd() + os.chdir(env['CD'].abspath) + try: + zipapp.create_archive( + source=env['PSV'], + target=dest, + main=env['entry'], + interpreter="/usr/bin/env python", + ) + finally: + os.chdir(olddir) diff -Nru scons-4.4.0+dfsg/template/RELEASE.txt scons-4.5.2+dfsg/template/RELEASE.txt --- scons-4.4.0+dfsg/template/RELEASE.txt 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/template/RELEASE.txt 2023-03-21 16:17:04.000000000 +0000 @@ -1,10 +1,17 @@ -A new SCons release, 4.1.0, is now available -on the SCons download page: +If you are reading this in the git repository, the contents +refer to *unreleased* changes since the last SCons release. +Past official release announcements appear at: - https://scons.org/pages/download.html + https://scons.org/tag/releases.html +================================================================== -Here is a summary of the changes since 4.1.0: +A new SCons release, 4.4.1, is now available on the SCons download page: + + https://scons.org/pages/download.html + + +Here is a summary of the changes since 4.4.0: NEW FUNCTIONALITY ----------------- diff -Nru scons-4.4.0+dfsg/test/Actions/actions.py scons-4.5.2+dfsg/test/Actions/actions.py --- scons-4.4.0+dfsg/test/Actions/actions.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Actions/actions.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons @@ -40,6 +39,7 @@ test.write('SConstruct', """ B = Builder(action = r'%(_python_)s build.py $TARGET 1 $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') """ % locals()) @@ -54,6 +54,7 @@ test.write('SConstruct', """ B = Builder(action = r'%(_python_)s build.py $TARGET 2 $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') """ % locals()) @@ -73,6 +74,7 @@ cp = subprocess.run(cmd, shell=True) return cp.returncode B = Builder(action = func) +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') """ % locals()) @@ -98,6 +100,7 @@ return self.cmd %% (' '.join(map(str, target)), ' '.join(map(str, source))) B = Builder(action = bld()) +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') """ % locals()) @@ -112,6 +115,8 @@ test.write('SConstruct', """\ def func(env, target, source): pass + +DefaultEnvironment(tools=[]) # test speedup env = Environment(S = Action('foo'), F = Action(func), L = Action(['arg1', 'arg2'])) diff -Nru scons-4.4.0+dfsg/test/Alias/action.py scons-4.5.2+dfsg/test/Alias/action.py --- scons-4.4.0+dfsg/test/Alias/action.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Alias/action.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,14 +22,11 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# """ Test that Aliases with actions work. """ -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - import TestSCons test = TestSCons.TestSCons() @@ -52,6 +51,7 @@ with open('bar', 'wb') as f: f.write(bytearray("bar(%s, %s)\\n" % (target, source),'utf-8')) +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = {'Cat':Builder(action=cat)}) env.Alias(target = ['build-f1'], source = 'f1.out', action = foo) f1 = env.Cat('f1.out', 'f1.in') diff -Nru scons-4.4.0+dfsg/test/Alias/Alias.py scons-4.5.2+dfsg/test/Alias/Alias.py --- scons-4.4.0+dfsg/test/Alias/Alias.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Alias/Alias.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os @@ -44,6 +43,7 @@ test.write('SConstruct', """ B = Builder(action = r'%(_python_)s build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment() env['BUILDERS']['B'] = B env.B(target = 'f1.out', source = 'f1.in') @@ -136,6 +136,7 @@ test.write('SConstruct', """ Decider('content') B = Builder(action = r'%(_python_)s build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment() env['BUILDERS']['B'] = B env.B(target = 'f1.out', source = 'f1.in') diff -Nru scons-4.4.0+dfsg/test/Alias/Depends.py scons-4.5.2+dfsg/test/Alias/Depends.py --- scons-4.4.0+dfsg/test/Alias/Depends.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Alias/Depends.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os @@ -44,6 +43,7 @@ test.write('SConstruct', """ B = Builder(action = r'%(_python_)s build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment() env['BUILDERS']['B'] = B env.B(target = 'f1.out', source = 'f1.in') diff -Nru scons-4.4.0+dfsg/test/Batch/changed_sources_main.cpp scons-4.5.2+dfsg/test/Batch/changed_sources_main.cpp --- scons-4.4.0+dfsg/test/Batch/changed_sources_main.cpp 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Batch/changed_sources_main.cpp 2023-03-21 16:17:04.000000000 +0000 @@ -1,7 +1,7 @@ - -#include - -int main() -{ - std::cout << "Hello, world!\n"; -} + +#include + +int main() +{ + std::cout << "Hello, world!\n"; +} diff -Nru scons-4.4.0+dfsg/test/builderrors.py scons-4.5.2+dfsg/test/builderrors.py --- scons-4.4.0+dfsg/test/builderrors.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/builderrors.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -21,8 +23,6 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - import os import TestSCons @@ -45,6 +45,7 @@ test.write(['one', 'SConstruct'], """ B0 = Builder(action = r'%(_python_)s ../build.py 0 $TARGET $SOURCES') B1 = Builder(action = r'%(_python_)s ../build.py 1 $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B0' : B0, 'B1' : B1 }) env.B1(target = 'f1.out', source = 'f1.in') env.B0(target = 'f2.out', source = 'f2.in') @@ -65,6 +66,7 @@ test.write(['two', 'SConstruct'], """ B0 = Builder(action = r'%(_python_)s ../build.py 0 $TARGET $SOURCES') B1 = Builder(action = r'%(_python_)s ../build.py 1 $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B0': B0, 'B1' : B1 }) env.B0(target = 'f1.out', source = 'f1.in') env.B1(target = 'f2.out', source = 'f2.in') @@ -85,6 +87,7 @@ test.write(['three', 'SConstruct'], """ B0 = Builder(action = r'%(_python_)s ../build.py 0 $TARGET $SOURCES') B1 = Builder(action = r'%(_python_)s ../build.py 1 $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B0' : B0, 'B1': B1 }) env.B0(target = 'f1.out', source = 'f1.in') env.B0(target = 'f2.out', source = 'f2.in') @@ -103,6 +106,7 @@ test.fail_test(os.path.exists(test.workpath('f3.out'))) test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env=Environment() env['ENV']['PATH'] = '' env.Command(target='foo.out', source=[], action='not_a_program') @@ -116,6 +120,7 @@ # but that shouldn't cause a scons traceback. long_cmd = 'xyz ' + "foobarxyz" * 100000 test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env=Environment() env.Command(target='longcmd.out', source=[], action='echo %s') """%long_cmd) @@ -137,6 +142,7 @@ # This will also give an exit status not in exitvalmap, # with error "Permission denied" or "No such file or directory". test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env=Environment() env['SHELL'] = 'one' env.Command(target='badshell.out', source=[], action='foo') @@ -156,6 +162,7 @@ # Should not give traceback. 
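The builderrors.py changes all land in failure-path tests: non-zero exit codes, an empty PATH, an over-long command line, a bogus $SHELL, and so on, where the expectation is an ordinary build error rather than a Python traceback. A sketch of the kind of SConstruct these tests generate (contents are illustrative):

import sys

DefaultEnvironment(tools=[])
env = Environment(ENV={'PATH': ''})  # deliberately unusable environment
# should end with something like "scons: *** [fail.out] Error 1", never a traceback
env.Command('fail.out', [], '%s -c "import sys; sys.exit(1)"' % sys.executable)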
test.write('SConstruct', """ import os +DefaultEnvironment(tools=[]) # test speedup env = Environment(ENV = os.environ) env.Command('dummy.txt', None, ['python -c "import sys; sys.exit(-1)"']) """) @@ -170,6 +177,7 @@ test.write('SConstruct', """ import atexit +DefaultEnvironment(tools=[]) # test speedup env = Environment() env2 = env.Clone() @@ -190,6 +198,7 @@ # Bug #1053: Alias is called "all", but default is the File "all" test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Default("all") env.Alias("all", env.Install("dir", "file.txt")) diff -Nru scons-4.4.0+dfsg/test/CacheDir/option--cs.py scons-4.5.2+dfsg/test/CacheDir/option--cs.py --- scons-4.4.0+dfsg/test/CacheDir/option--cs.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CacheDir/option--cs.py 2023-03-21 16:17:04.000000000 +0000 @@ -54,7 +54,6 @@ cache = test.workpath('cache') test.write(['src1', 'SConstruct'], """ -DefaultEnvironment(tools=[]) def cat(env, source, target): target = str(target[0]) with open('cat.out', 'a') as f: @@ -63,6 +62,8 @@ for src in source: with open(str(src), "r") as f2: f.write(f2.read()) + +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools=[], BUILDERS={'Internal':Builder(action=cat), 'External':Builder(action=r'%(_python_)s build.py $TARGET $SOURCES')}) @@ -156,6 +157,7 @@ src2_hello = test.workpath('src2', hello_exe) test.write(['src2', 'SConstruct'], """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Program('hello.c') CacheDir(r'%s') diff -Nru scons-4.4.0+dfsg/test/Chmod.py scons-4.5.2+dfsg/test/Chmod.py --- scons-4.4.0+dfsg/test/Chmod.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Chmod.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify that the Chmod() Action works. 
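For reference, the surface this test exercises: Chmod() can be run immediately with Execute(), placed in a Command action list, given a list of entries, and given either an octal integer or a symbolic mode string. A condensed sketch (file names illustrative, Copy standing in for the test's cat action):

DefaultEnvironment(tools=[])
Execute(Chmod('f1', 0o666))                  # single file, octal mode
Execute(Chmod(['d11', Dir('d12')], 0o777))   # list of files/directories
Execute(Chmod('f13', 'a=r'))                 # symbolic mode string

env = Environment(FILE='f5')
env.Command('f6.out', 'f6.in', [Chmod('$FILE', 0o666), Copy('$TARGET', '$SOURCE')])

The expected output rewritten below also switches the printed modes to Python 3 style octal literals (0o666 rather than 0666).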
@@ -43,26 +42,36 @@ Execute(Chmod(('f1-File'), 0o666)) Execute(Chmod('d2', 0o777)) Execute(Chmod(Dir('d2-Dir'), 0o777)) + def cat(env, source, target): target = str(target[0]) with open(target, "wb") as f: for src in source: with open(str(src), "rb") as infp: f.write(infp.read()) + Cat = Action(cat) env = Environment() -env.Command('bar.out', 'bar.in', [Cat, - Chmod("f3", 0o666), - Chmod("d4", 0o777)]) -env = Environment(FILE = 'f5') +env.Command( + 'bar.out', + 'bar.in', + [Cat, Chmod("f3", 0o666), Chmod("d4", 0o777)], +) +env = Environment(FILE='f5') env.Command('f6.out', 'f6.in', [Chmod('$FILE', 0o666), Cat]) -env.Command('f7.out', 'f7.in', [Cat, - Chmod('Chmod-$SOURCE', 0o666), - Chmod('${TARGET}-Chmod', 0o666)]) +env.Command( + 'f7.out', + 'f7.in', + [Cat, Chmod('Chmod-$SOURCE', 0o666), Chmod('${TARGET}-Chmod', 0o666)], +) # Make sure Chmod works with a list of arguments -env = Environment(FILE = 'f9') -env.Command('f8.out', 'f8.in', [Chmod(['$FILE', File('f10')], 0o666), Cat]) +env = Environment(FILE='f9') +env.Command( + 'f8.out', + 'f8.in', + [Chmod(['$FILE', File('f10')], 0o666), Cat], +) Execute(Chmod(['d11', Dir('d12')], 0o777)) Execute(Chmod('f13', "a=r")) Execute(Chmod('f14', "ogu+w")) @@ -117,28 +126,30 @@ os.chmod(test.workpath('d16'), 0o555) os.chmod(test.workpath('d17'), 0o555) os.chmod(test.workpath('d18'), 0o555) -expect = test.wrap_stdout(read_str = """\ -Chmod("f1", 0666) -Chmod("f1-File", 0666) -Chmod("d2", 0777) -Chmod("d2-Dir", 0777) -Chmod(["d11", "d12"], 0777) + +expect = test.wrap_stdout( + read_str = """\ +Chmod("f1", 0o666) +Chmod("f1-File", 0o666) +Chmod("d2", 0o777) +Chmod("d2-Dir", 0o777) +Chmod(["d11", "d12"], 0o777) Chmod("f13", "a=r") Chmod("f14", "ogu+w") Chmod("f15", "ug=rw, go+ rw") Chmod("d16", "0777") Chmod(["d17", "d18"], "ogu = rwx") """, - build_str = """\ + build_str = """\ cat(["bar.out"], ["bar.in"]) -Chmod("f3", 0666) -Chmod("d4", 0777) -Chmod("f5", 0666) +Chmod("f3", 0o666) +Chmod("d4", 0o777) +Chmod("f5", 0o666) cat(["f6.out"], ["f6.in"]) cat(["f7.out"], ["f7.in"]) -Chmod("Chmod-f7.in", 0666) -Chmod("f7.out-Chmod", 0666) -Chmod(["f9", "f10"], 0666) +Chmod("Chmod-f7.in", 0o666) +Chmod("f7.out-Chmod", 0o666) +Chmod(["f9", "f10"], 0o666) cat(["f8.out"], ["f8.in"]) """) test.run(options = '-n', arguments = '.', stdout = expect) diff -Nru scons-4.4.0+dfsg/test/CompilationDatabase/basic.py scons-4.5.2+dfsg/test/CompilationDatabase/basic.py --- scons-4.4.0+dfsg/test/CompilationDatabase/basic.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CompilationDatabase/basic.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,7 +22,7 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
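The CompilationDatabase tests that follow exercise the compilation_db tool. In ordinary use (assuming a working C toolchain; names below are illustrative) the pattern reduces to:

DefaultEnvironment(tools=[])
env = Environment(tools=['gcc', 'gnulink'])
env.Tool('compilation_db')

# paths are relative by default; COMPILATIONDB_USE_ABSPATH=True switches to absolute
cdb = env.CompilationDatabase('compile_commands.json')
env.Program('hello', 'hello.c')
Default(cdb)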
-# + """ Test CompilationDatabase and several variations of ways to call it and values of COMPILATIONDB_USE_ABSPATH @@ -46,7 +48,7 @@ 'compile_commands_target.json', 'compile_commands.json', 'compile_commands_over_rel.json', - 'compile_commands_over_abs_0.json' + 'compile_commands_over_abs_0.json', ] abs_files = [ @@ -63,7 +65,8 @@ "file": "test_main.c", "output": "test_main.o" } -]""" % (sys.executable, test.workdir) +] +""" % (sys.executable, test.workdir) if sys.platform == 'win32': example_rel_file = example_rel_file.replace('\\', '\\\\') @@ -80,7 +83,8 @@ "file": "%s", "output": "%s" } -]""" % (sys.executable, test.workdir, os.path.join(test.workdir, 'test_main.c'), os.path.join(test.workdir, 'test_main.o')) +] +""" % (sys.executable, test.workdir, os.path.join(test.workdir, 'test_main.c'), os.path.join(test.workdir, 'test_main.o')) if sys.platform == 'win32': example_abs_file = example_abs_file.replace('\\', '\\\\') diff -Nru scons-4.4.0+dfsg/test/CompilationDatabase/fixture/SConstruct scons-4.5.2+dfsg/test/CompilationDatabase/fixture/SConstruct --- scons-4.4.0+dfsg/test/CompilationDatabase/fixture/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CompilationDatabase/fixture/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -7,7 +7,7 @@ LINKFLAGS=[], CC='$PYTHON mygcc.py cc', CXX='$PYTHON mygcc.py c++', - tools=['gcc','g++','gnulink'], + tools=['gcc', 'g++', 'gnulink'], ) env.Tool('compilation_db') diff -Nru scons-4.4.0+dfsg/test/CompilationDatabase/fixture/SConstruct_tempfile scons-4.5.2+dfsg/test/CompilationDatabase/fixture/SConstruct_tempfile --- scons-4.4.0+dfsg/test/CompilationDatabase/fixture/SConstruct_tempfile 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/CompilationDatabase/fixture/SConstruct_tempfile 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,29 @@ +import sys + +DefaultEnvironment(tools=[]) +env = Environment( + PYTHON=sys.executable, + LINK='$PYTHON mylink.py', + LINKFLAGS=[], + CC='$PYTHON mygcc.py cc', + tools=['gcc'], + MAXLINELENGTH=10, + OBJSUFFIX='.o', +) + +# make sure TempFileMunge is used +if 'TEMPFILE' not in env['CCCOM']: + env['CCCOM'] = '${TEMPFILE("%s")}' % (env['CCCOM']) + +env.Tool('compilation_db') + +outputs = [] + +# Should be relative paths +outputs += env.CompilationDatabase('compile_commands_only_arg.json') + +env.Object('test_main.c') + +# Prevent actual call of $PYTHON @tempfile since "mygcc.py cc ..." is not a proper python statement +# Interesting outputs are json databases +env.Default(outputs) diff -Nru scons-4.4.0+dfsg/test/CompilationDatabase/TEMPFILE.py scons-4.5.2+dfsg/test/CompilationDatabase/TEMPFILE.py --- scons-4.4.0+dfsg/test/CompilationDatabase/TEMPFILE.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/CompilationDatabase/TEMPFILE.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test that CompilationDatabase works when TEMPFILE is being used to handle long +commandlines for compilers/linkers/etc +""" + +import sys +import os +import os.path +import TestSCons + +test = TestSCons.TestSCons() + +test.file_fixture('mygcc.py') +test.file_fixture('test_main.c') +test.file_fixture('fixture/SConstruct_tempfile', 'SConstruct') + +test.run() + +rel_files = [ + 'compile_commands_only_arg.json', +] + +example_rel_file = """[ + { + "command": "%s mygcc.py cc -o test_main.o -c test_main.c", + "directory": "%s", + "file": "test_main.c", + "output": "test_main.o" + } +] +""" % (sys.executable, test.workdir) + +if sys.platform == 'win32': + example_rel_file = example_rel_file.replace('\\', '\\\\') + +for f in rel_files: + # print("Checking:%s" % f) + test.must_exist(f) + test.must_match(f, example_rel_file, mode='r') + +test.pass_test() diff -Nru scons-4.4.0+dfsg/test/CompilationDatabase/variant_dir.py scons-4.5.2+dfsg/test/CompilationDatabase/variant_dir.py --- scons-4.4.0+dfsg/test/CompilationDatabase/variant_dir.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CompilationDatabase/variant_dir.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,7 +22,7 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
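The new SConstruct_tempfile fixture and TEMPFILE.py test check that the recorded compile command stays intact when TempFileMunge handles long command lines. The essential trick, as in the fixture (shown here with an ordinary gcc instead of the test's mygcc.py wrapper):

DefaultEnvironment(tools=[])
env = Environment(tools=['gcc'], MAXLINELENGTH=10)  # tiny limit, forces a tempfile
# wrap the compile command in ${TEMPFILE(...)} if the tool has not already done so
if 'TEMPFILE' not in env['CCCOM']:
    env['CCCOM'] = '${TEMPFILE("%s")}' % env['CCCOM']

env.Tool('compilation_db')
env.CompilationDatabase('compile_commands_only_arg.json')
env.Object('test_main.c')

The JSON entry the test compares against still records the full, un-munged compile command.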
-# + """ Test CompilationDatabase and several variations of ways to call it and values of COMPILATIONDB_USE_ABSPATH @@ -77,7 +79,8 @@ "file": "%(src_file)s", "output": "%(output2_file)s" } -]""" % {'exe': sys.executable, +] +""" % {'exe': sys.executable, 'workdir': test.workdir, 'src_file': os.path.join('src', 'test_main.c'), 'output_file': os.path.join('build', 'test_main.o'), @@ -106,7 +109,8 @@ "file": "%(abs_src_file)s", "output": "%(abs_output2_file)s" } -]""" % {'exe': sys.executable, +] +""" % {'exe': sys.executable, 'workdir': test.workdir, 'src_file': os.path.join('src', 'test_main.c'), 'abs_src_file': os.path.join(test.workdir, 'src', 'test_main.c'), @@ -130,7 +134,8 @@ "file": "%(src_file)s", "output": "%(output_file)s" } -]""" % {'exe': sys.executable, +] +""" % {'exe': sys.executable, 'workdir': test.workdir, 'src_file': os.path.join('src', 'test_main.c'), 'output_file': os.path.join('build', 'test_main.o'), @@ -151,7 +156,8 @@ "file": "%(src_file)s", "output": "%(output2_file)s" } -]""" % {'exe': sys.executable, +] +""" % {'exe': sys.executable, 'workdir': test.workdir, 'src_file': os.path.join('src', 'test_main.c'), 'output2_file': os.path.join('build2', 'test_main.o'), diff -Nru scons-4.4.0+dfsg/test/Configure/conftest_source_file/header1.h scons-4.5.2+dfsg/test/Configure/conftest_source_file/header1.h --- scons-4.4.0+dfsg/test/Configure/conftest_source_file/header1.h 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file/header1.h 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +#pragma once +#include "header2.h" \ No newline at end of file diff -Nru scons-4.4.0+dfsg/test/Configure/conftest_source_file/header2.h scons-4.5.2+dfsg/test/Configure/conftest_source_file/header2.h --- scons-4.4.0+dfsg/test/Configure/conftest_source_file/header2.h 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file/header2.h 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +#pragma once +int test_header = 1; \ No newline at end of file diff -Nru scons-4.4.0+dfsg/test/Configure/conftest_source_file/header3.h scons-4.5.2+dfsg/test/Configure/conftest_source_file/header3.h --- scons-4.4.0+dfsg/test/Configure/conftest_source_file/header3.h 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file/header3.h 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +#pragma once +int test_header = 3; \ No newline at end of file diff -Nru scons-4.4.0+dfsg/test/Configure/conftest_source_file/main.c scons-4.5.2+dfsg/test/Configure/conftest_source_file/main.c --- scons-4.4.0+dfsg/test/Configure/conftest_source_file/main.c 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file/main.c 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,2 @@ +#include "header1.h" +int main(){return 0;} \ No newline at end of file diff -Nru scons-4.4.0+dfsg/test/Configure/conftest_source_file/SConstruct scons-4.5.2+dfsg/test/Configure/conftest_source_file/SConstruct --- scons-4.4.0+dfsg/test/Configure/conftest_source_file/SConstruct 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,8 @@ +DefaultEnvironment(tools=[]) +env = Environment() +env.Append(CPPPATH=['.']) +conf1 = Configure(env) +conf1.CheckHeader("header1.h") +conf1.CheckHeader("header3.h") +conf1.Finish() +env.Program('out', 'main.c') \ No newline at end of file diff -Nru 
scons-4.4.0+dfsg/test/Configure/conftest_source_file.py scons-4.5.2+dfsg/test/Configure/conftest_source_file.py --- scons-4.4.0+dfsg/test/Configure/conftest_source_file.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/conftest_source_file.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Template for end-to-end test file. +Replace this with a description of the test. +""" + +import textwrap +import os + +import TestSCons + +test = TestSCons.TestSCons() + +test.dir_fixture("conftest_source_file") + +test.run(arguments='.') + +conf_text = textwrap.dedent("""\ + Checking for C header file header1.h... {arg1}yes + Checking for C header file header3.h... {arg2}yes +""") + +test.up_to_date(read_str=conf_text.format(arg1='(cached) ', arg2='(cached) ')) + +test.write('header2.h', """ +#pragma once +int test_header = 2; +""") + +test.not_up_to_date(read_str=conf_text.format(arg1='(cached) ', arg2='(cached) ')) + +test.up_to_date(read_str=conf_text.format(arg1='', arg2='(cached) ')) +os.environ['SCONSFLAGS'] = '--config=force' +test.up_to_date(read_str=conf_text.format(arg1='', arg2='')) +os.environ['SCONSFLAGS'] = '' + +test.up_to_date(read_str=conf_text.format(arg1='(cached) ', arg2='(cached) ')) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/Configure/issue-3469/fixture/SConstruct scons-4.5.2+dfsg/test/Configure/issue-3469/fixture/SConstruct --- scons-4.4.0+dfsg/test/Configure/issue-3469/fixture/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Configure/issue-3469/fixture/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -1,3 +1,7 @@ +# SPDX-License-Identifier: MIT +# +# Copyright The SCons Foundation + """ This tests if we add/remove a test in between other tests if a rerun will properly cache the results. 
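The conftest_source_file fixture and test hinge on Configure() caching: CheckHeader() results are cached across runs, editing a header that a checked header includes must invalidate only that one cached result, and --config=force re-runs every check. The fixture boils down to:

DefaultEnvironment(tools=[])
env = Environment()
env.Append(CPPPATH=['.'])
conf = Configure(env)
conf.CheckHeader('header1.h')  # header1.h includes header2.h, so touching header2.h re-runs this check
conf.CheckHeader('header3.h')
env = conf.Finish()
env.Program('out', 'main.c')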
Github issue #3469 @@ -6,7 +10,7 @@ DefaultEnvironment(tools=[]) vars = Variables() -vars.Add(BoolVariable('SKIP', 'Skip Middle Conf test', 0)) +vars.Add(BoolVariable('SKIP', 'Skip Middle Conf test', False)) env = Environment(variables=vars) conf = Configure(env) diff -Nru scons-4.4.0+dfsg/test/Copy-Action.py scons-4.5.2+dfsg/test/Copy-Action.py --- scons-4.4.0+dfsg/test/Copy-Action.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Copy-Action.py 2023-03-21 16:17:04.000000000 +0000 @@ -40,6 +40,10 @@ Execute(Copy('f1.out', 'f1.in')) Execute(Copy(File('d2.out'), 'd2.in')) Execute(Copy('d3.out', File('f3.in'))) +# Issue #3009: make sure it's not mangled if src is a list. +# make sure both list-of-str and list-of-Node work +Execute(Copy('d7.out', ['f10.in', 'f11.in'])) +Execute(Copy('d7.out', Glob('f?.in'))) def cat(env, source, target): target = str(target[0]) @@ -71,6 +75,7 @@ test.write(['d2.in', 'file'], "d2.in/file\n") test.write('f3.in', "f3.in\n") test.subdir('d3.out') +test.subdir('d7.out') test.write('bar.in', "bar.in\n") test.write('f4.in', "f4.in\n") test.subdir('d5.in') @@ -101,6 +106,8 @@ Copy("f1.out", "f1.in") Copy("d2.out", "d2.in") Copy("d3.out", "f3.in") +Copy("d7.out", ["f10.in", "f11.in"]) +Copy("d7.out", ["f1.in", "f3.in", "f4.in", "f6.in", "f7.in", "f8.in", "f9.in"]) """, build_str="""\ cat(["bar.out"], ["bar.in"]) @@ -123,6 +130,8 @@ test.must_not_exist('f1.out') test.must_not_exist('d2.out') test.must_not_exist(os.path.join('d3.out', 'f3.in')) +test.must_not_exist(os.path.join('d7.out', 'f7.in')) +test.must_not_exist(os.path.join('d7.out', 'f11.in')) test.must_not_exist('f4.out') test.must_not_exist('d5.out') test.must_not_exist(os.path.join('d6.out', 'f6.in')) @@ -141,6 +150,8 @@ test.must_match('f1.out', "f1.in\n", mode='r') test.must_match(['d2.out', 'file'], "d2.in/file\n", mode='r') test.must_match(['d3.out', 'f3.in'], "f3.in\n", mode='r') +test.must_match(['d7.out', 'f7.in'], "f7.in\n", mode='r') +test.must_match(['d7.out', 'f11.in'], "f11.in\n", mode='r') test.must_match('f4.out', "f4.in\n", mode='r') test.must_match(['d5.out', 'file'], "d5.in/file\n", mode='r') test.must_match(['d6.out', 'f6.in'], "f6.in\n", mode='r') diff -Nru scons-4.4.0+dfsg/test/Copy-Option.py scons-4.5.2+dfsg/test/Copy-Option.py --- scons-4.4.0+dfsg/test/Copy-Option.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Copy-Option.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test that setting Variables in an Environment doesn't prevent the @@ -37,20 +36,22 @@ gpib_options = ['NI_GPIB', 'NI_ENET'] gpib_include = '/' -#0.96 broke copying ListVariables ??? +# 0.96 broke copying ListVariables ??? 
opts = Variables('config.py', ARGUMENTS) opts.AddVariables( - BoolVariable('gpib', 'enable gpib support', 1), - ListVariable('gpib_options', + BoolVariable('gpib', 'enable gpib support', True), + ListVariable( + 'gpib_options', 'whether and what kind of gpib support shall be enabled', 'all', - gpib_options), - ) -env = Environment(options = opts, CPPPATH = ['#/']) -new_env=env.Clone() + gpib_options, + ), +) +env = Environment(options=opts, CPPPATH=['#/']) +new_env = env.Clone() """) -test.run(arguments = '.') +test.run(arguments='.') test.pass_test() diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/append.py scons-4.5.2+dfsg/test/CPPDEFINES/append.py --- scons-4.4.0+dfsg/test/CPPDEFINES/append.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/append.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify appending to CPPPDEFINES with various data types. @@ -33,203 +32,315 @@ test = TestSCons.TestSCons() -# Note: we explicitly set CPPDEFPREFIX here to simplify testing on -# Windows. - -test.write('SConstruct', """\ -env_1738_2 = Environment(CPPDEFPREFIX='-D') -env_1738_2['CPPDEFINES'] = ['FOO'] -env_1738_2.Append(CPPDEFINES={'value' : '1'}) -print(env_1738_2.subst('$_CPPDEFFLAGS')) -#env_1738_2.Object('test_1738_2', 'main.c') - -# https://github.com/SCons/scons/issues/2300 -env_2300_1 = Environment(CPPDEFINES = 'foo', CPPDEFPREFIX='-D') -env_2300_1.Append(CPPDEFINES='bar') -print(env_2300_1.subst('$_CPPDEFFLAGS')) - -env_2300_2 = Environment(CPPDEFINES = ['foo'], CPPDEFPREFIX='-D') # note the list -env_2300_2.Append(CPPDEFINES='bar') -print(env_2300_2.subst('$_CPPDEFFLAGS')) - -# https://github.com/SCons/scons/issues/1152 -# https://github.com/SCons/scons/issues/2900 -# Python3 dicts dont preserve order. Hence we supply subclass of OrderedDict -# whose __str__ and __repr__ act like a normal dict. 
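Copy-Option.py is about construction variables surviving a Clone(). Restated with the current `variables=` keyword (the test itself keeps the older `options=` spelling), the pattern is:

gpib_options = ['NI_GPIB', 'NI_ENET']

opts = Variables('config.py', ARGUMENTS)
opts.AddVariables(
    BoolVariable('gpib', 'enable gpib support', True),
    ListVariable('gpib_options',
                 'whether and what kind of gpib support shall be enabled',
                 'all', gpib_options),
)
env = Environment(variables=opts, CPPPATH=['#/'])
new_env = env.Clone()  # cloning must not mangle the ListVariable value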
-from collections import OrderedDict -class OrderedPrintingDict(OrderedDict): - def __repr__(self): - return '{' + ', '.join(['%r: %r'%(k, v) for (k, v) in self.items()]) + '}' - - __str__ = __repr__ - - # Because dict-like objects (except dict and UserDict) are not deep copied - # directly when constructing Environment(CPPDEFINES = OrderedPrintingDict(...)) - def __semi_deepcopy__(self): - return self.copy() - -cases=[('string', 'FOO'), - ('list', ['NAME1', 'NAME2']), - ('list-of-2lists', [('NAME1','VAL1'), ['NAME2','VAL2']]), - ('dict', OrderedPrintingDict([('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')])) - ] - -for (t1, c1) in cases: - for (t2, c2) in cases: - print("==== Testing CPPDEFINES, appending a %s to a %s"%(t2, t1)) - print(" orig = %s, append = %s"%(c1, c2)) - env=Environment(CPPDEFINES = c1, CPPDEFPREFIX='-D') - env.Append(CPPDEFINES = c2) - final=env.subst('$_CPPDEFFLAGS',source="src", target="tgt") - print('Append:\\n\\tresult=%s\\n\\tfinal=%s'%\\ - (env['CPPDEFINES'], final)) - env=Environment(CPPDEFINES = c1, CPPDEFPREFIX='-D') - env.AppendUnique(CPPDEFINES = c2) - final=env.subst('$_CPPDEFFLAGS',source="src", target="tgt") - print('AppendUnique:\\n\\tresult=%s\\n\\tfinal=%s'%\\ - (env['CPPDEFINES'], final)) -""") +# Note: explicitly set CPPDEFPREFIX here to simplify testing on Windows. +# Link: fixture/SConstruct-Append +test.file_fixture(["fixture", "SConstruct-Append"], "SConstruct") expect_print_output="""\ -DFOO -Dvalue=1 -Dfoo -Dbar -Dfoo -Dbar +-Dfoo -Dbar -Dbaz +-Dfoo bar -Dbaz +-Dfoo -Dbar baz +-Dfoo -Dbar -Dbaz +-DMacro2=Value2 -DMacro4 -DMacro3=Value3 -DMacro1=Value1 +-DMacro1=Value1 +-DMacro1 -DValue1 ==== Testing CPPDEFINES, appending a string to a string - orig = FOO, append = FOO + orig = 'FOO', append = 'FOO' +Append: + result=['FOO', 'FOO'] + final=-DFOO -DFOO +AppendUnique: + result=['FOO'] + final=-DFOO +==== Testing CPPDEFINES, appending a valuestring to a string + orig = 'FOO', append = 'NAME1=VAL1' Append: - result=['FOO', 'FOO'] - final=-DFOO -DFOO + result=['FOO', 'NAME1=VAL1'] + final=-DFOO -DNAME1=VAL1 AppendUnique: - result=['FOO'] - final=-DFOO + result=['FOO', 'NAME1=VAL1'] + final=-DFOO -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list to a string - orig = FOO, append = ['NAME1', 'NAME2'] + orig = 'FOO', append = ['NAME1', 'NAME2', 'NAME3'] Append: - result=['FOO', 'NAME1', 'NAME2'] - final=-DFOO -DNAME1 -DNAME2 + result=['FOO', 'NAME1', 'NAME2', 'NAME3'] + final=-DFOO -DNAME1 -DNAME2 -DNAME3 AppendUnique: - result=[('FOO',), ('NAME1',), ('NAME2',)] - final=-DFOO -DNAME1 -DNAME2 + result=['FOO', 'NAME1', 'NAME2', 'NAME3'] + final=-DFOO -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, appending a tuple to a string + orig = 'FOO', append = ('NAME1', 'VAL1') +Append: + result=['FOO', ('NAME1', 'VAL1')] + final=-DFOO -DNAME1=VAL1 +AppendUnique: + result=['FOO', ('NAME1', 'VAL1')] + final=-DFOO -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list-of-2lists to a string - orig = FOO, append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + orig = 'FOO', append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] Append: - result=['FOO', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] - final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 + result=['FOO', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 AppendUnique: - result=[('FOO',), ('NAME1', 'VAL1'), ('NAME2', 'VAL2')] - final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 + result=['FOO', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 ==== Testing CPPDEFINES, appending a dict 
to a string - orig = FOO, append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} + orig = 'FOO', append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Append: + result=['FOO', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=['FOO', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, appending a string to a valuestring + orig = 'NAME1=VAL1', append = 'FOO' +Append: + result=['NAME1=VAL1', 'FOO'] + final=-DNAME1=VAL1 -DFOO +AppendUnique: + result=['NAME1=VAL1', 'FOO'] + final=-DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, appending a valuestring to a valuestring + orig = 'NAME1=VAL1', append = 'NAME1=VAL1' Append: - result=['FOO', {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}] - final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=['NAME1=VAL1', 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME1=VAL1 AppendUnique: - result=['FOO', ('NAME2', 'VAL2'), 'NAME3', ('NAME1', 'VAL1')] - final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=['NAME1=VAL1'] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, appending a list to a valuestring + orig = 'NAME1=VAL1', append = ['NAME1', 'NAME2', 'NAME3'] +Append: + result=['NAME1=VAL1', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +AppendUnique: + result=['NAME1=VAL1', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, appending a tuple to a valuestring + orig = 'NAME1=VAL1', append = ('NAME1', 'VAL1') +Append: + result=['NAME1=VAL1', ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME1=VAL1 +AppendUnique: + result=['NAME1=VAL1'] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, appending a list-of-2lists to a valuestring + orig = 'NAME1=VAL1', append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Append: + result=['NAME1=VAL1', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 +AppendUnique: + result=['NAME1=VAL1', ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, appending a dict to a valuestring + orig = 'NAME1=VAL1', append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Append: + result=['NAME1=VAL1', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=['NAME1=VAL1', ('NAME2', 'VAL2'), ('NAME3', None)] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 ==== Testing CPPDEFINES, appending a string to a list - orig = ['NAME1', 'NAME2'], append = FOO + orig = ['NAME1', 'NAME2', 'NAME3'], append = 'FOO' Append: - result=['NAME1', 'NAME2', 'FOO'] - final=-DNAME1 -DNAME2 -DFOO + result=['NAME1', 'NAME2', 'NAME3', 'FOO'] + final=-DNAME1 -DNAME2 -DNAME3 -DFOO AppendUnique: - result=[('NAME1',), ('NAME2',), ('FOO',)] - final=-DNAME1 -DNAME2 -DFOO + result=['NAME1', 'NAME2', 'NAME3', 'FOO'] + final=-DNAME1 -DNAME2 -DNAME3 -DFOO +==== Testing CPPDEFINES, appending a valuestring to a list + orig = ['NAME1', 'NAME2', 'NAME3'], append = 'NAME1=VAL1' +Append: + result=['NAME1', 'NAME2', 'NAME3', 'NAME1=VAL1'] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=['NAME1', 'NAME2', 'NAME3', 'NAME1=VAL1'] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list to a list - orig = ['NAME1', 'NAME2'], append = ['NAME1', 'NAME2'] + orig = ['NAME1', 'NAME2', 'NAME3'], append = ['NAME1', 'NAME2', 'NAME3'] Append: - result=['NAME1', 'NAME2', 'NAME1', 'NAME2'] - final=-DNAME1 
-DNAME2 -DNAME1 -DNAME2 + result=['NAME1', 'NAME2', 'NAME3', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1 -DNAME2 -DNAME3 AppendUnique: - result=[('NAME1',), ('NAME2',)] - final=-DNAME1 -DNAME2 + result=['NAME1', 'NAME2', 'NAME3'] + final=-DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, appending a tuple to a list + orig = ['NAME1', 'NAME2', 'NAME3'], append = ('NAME1', 'VAL1') +Append: + result=['NAME1', 'NAME2', 'NAME3', ('NAME1', 'VAL1')] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=['NAME1', 'NAME2', 'NAME3', ('NAME1', 'VAL1')] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list-of-2lists to a list - orig = ['NAME1', 'NAME2'], append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + orig = ['NAME1', 'NAME2', 'NAME3'], append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] Append: - result=['NAME1', 'NAME2', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] - final=-DNAME1 -DNAME2 -DNAME1=VAL1 -DNAME2=VAL2 + result=['NAME1', 'NAME2', 'NAME3', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 -DNAME2=VAL2 AppendUnique: - result=[('NAME1',), ('NAME2',), ('NAME1', 'VAL1'), ('NAME2', 'VAL2')] - final=-DNAME1 -DNAME2 -DNAME1=VAL1 -DNAME2=VAL2 + result=['NAME1', 'NAME2', 'NAME3', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME1=VAL1 -DNAME2=VAL2 ==== Testing CPPDEFINES, appending a dict to a list - orig = ['NAME1', 'NAME2'], append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} + orig = ['NAME1', 'NAME2', 'NAME3'], append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Append: + result=['NAME1', 'NAME2', 'NAME3', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=['NAME1', 'NAME2', 'NAME3', ('NAME2', 'VAL2'), ('NAME1', 'VAL1')] + final=-DNAME1 -DNAME2 -DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 +==== Testing CPPDEFINES, appending a string to a tuple + orig = ('NAME1', 'VAL1'), append = 'FOO' +Append: + result=[('NAME1', 'VAL1'), 'FOO'] + final=-DNAME1=VAL1 -DFOO +AppendUnique: + result=[('NAME1', 'VAL1'), 'FOO'] + final=-DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, appending a valuestring to a tuple + orig = ('NAME1', 'VAL1'), append = 'NAME1=VAL1' +Append: + result=[('NAME1', 'VAL1'), 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME1=VAL1 +AppendUnique: + result=[('NAME1', 'VAL1')] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, appending a list to a tuple + orig = ('NAME1', 'VAL1'), append = ['NAME1', 'NAME2', 'NAME3'] +Append: + result=[('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +AppendUnique: + result=[('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, appending a tuple to a tuple + orig = ('NAME1', 'VAL1'), append = ('NAME1', 'VAL1') +Append: + result=[('NAME1', 'VAL1'), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME1=VAL1 +AppendUnique: + result=[('NAME1', 'VAL1')] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, appending a list-of-2lists to a tuple + orig = ('NAME1', 'VAL1'), append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] Append: - result=['NAME1', 'NAME2', {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}] - final=-DNAME1 -DNAME2 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=[('NAME1', 'VAL1'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 AppendUnique: - result=[('NAME1',), ('NAME2',), ('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1')] - 
final=-DNAME1 -DNAME2 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, appending a dict to a tuple + orig = ('NAME1', 'VAL1'), append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Append: + result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +AppendUnique: + result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME3', None)] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 ==== Testing CPPDEFINES, appending a string to a list-of-2lists - orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = FOO + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = 'FOO' +Append: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'FOO'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DFOO +AppendUnique: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'FOO'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DFOO +==== Testing CPPDEFINES, appending a valuestring to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = 'NAME1=VAL1' Append: - result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'FOO'] - final=-DNAME1=VAL1 -DNAME2=VAL2 -DFOO + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1=VAL1 AppendUnique: - result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('FOO',)] - final=-DNAME1=VAL1 -DNAME2=VAL2 -DFOO + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 ==== Testing CPPDEFINES, appending a list to a list-of-2lists - orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = ['NAME1', 'NAME2'] + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = ['NAME1', 'NAME2', 'NAME3'] +Append: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1 -DNAME2 -DNAME3 +AppendUnique: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, appending a tuple to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = ('NAME1', 'VAL1') Append: - result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], 'NAME1', 'NAME2'] - final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1 -DNAME2 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1=VAL1 AppendUnique: - result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME1',), ('NAME2',)] - final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1 -DNAME2 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 ==== Testing CPPDEFINES, appending a list-of-2lists to a list-of-2lists orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] Append: - result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] - final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1=VAL1 -DNAME2=VAL2 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1=VAL1 -DNAME2=VAL2 AppendUnique: - result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2')] - final=-DNAME1=VAL1 -DNAME2=VAL2 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 ==== Testing CPPDEFINES, appending a dict to a list-of-2lists orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} Append: - result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}] - final=-DNAME1=VAL1 
-DNAME2=VAL2 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 AppendUnique: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1')] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2'], ('NAME3', None)] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 ==== Testing CPPDEFINES, appending a string to a dict - orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = FOO + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = 'FOO' Append: - result={'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1', 'FOO': None} - final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), 'FOO'] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DFOO AppendUnique: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1'), 'FOO'] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DFOO + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), 'FOO'] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, appending a valuestring to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = 'NAME1=VAL1' +Append: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), 'NAME1=VAL1'] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1=VAL1 +AppendUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list to a dict - orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = ['NAME1', 'NAME2'] + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = ['NAME1', 'NAME2', 'NAME3'] Append: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1'), 'NAME1', 'NAME2'] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1 -DNAME2 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 AppendUnique: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1'), ('NAME1',), ('NAME2',)] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1 -DNAME2 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), 'NAME1', 'NAME2'] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1 -DNAME2 +==== Testing CPPDEFINES, appending a tuple to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = ('NAME1', 'VAL1') +Append: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1=VAL1 +AppendUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 ==== Testing CPPDEFINES, appending a list-of-2lists to a dict orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] Append: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 AppendUnique: - result=[('NAME2', 'VAL2'), ('NAME3',), ('NAME1', 'VAL1')] - final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 ==== Testing CPPDEFINES, 
appending a dict to a dict orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, append = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} Append: - result={'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} - final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 AppendUnique: - result={'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} - final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 """ build_output="scons: `.' is up to date.\n" - -expect = test.wrap_stdout(build_str=build_output, - read_str = expect_print_output) -test.run(arguments = '.', stdout=expect) +expect = test.wrap_stdout(build_str=build_output, read_str=expect_print_output) +test.run(arguments='.', stdout=expect) test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/basic.py scons-4.5.2+dfsg/test/CPPDEFINES/basic.py --- scons-4.4.0+dfsg/test/CPPDEFINES/basic.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/basic.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify basic use of CPPPDEFINES with various data types. @@ -37,7 +36,7 @@ 'xyz', ['x', 'y', 'z'], ['x', ['y', 123], 'z', ('int', '$INTEGER')], - { 'c' : 3, 'b': None, 'a' : 1 }, + {'c': 3, 'b': None, 'a': 1}, "${TESTDEFS}", "${GEN}", ] @@ -48,33 +47,59 @@ return 'TARGET_AND_SOURCE_ARE_MISSING' for i in test_list: - env = Environment(CPPDEFPREFIX='-D', CPPDEFSUFFIX='', INTEGER=0, TESTDEFS=["FOO", "BAR=1"], GEN=generator) + env = Environment( + CPPDEFPREFIX='-D', + CPPDEFSUFFIX='', + INTEGER=0, + TESTDEFS=["FOO", "BAR=1"], + GEN=generator, + ) ttt = env.Entry('#ttt') sss = env.Entry('#sss') - print(env.Clone(CPPDEFINES=i).subst('$_CPPDEFFLAGS', target=[ttt], source=[sss])) + print( + env.Clone(CPPDEFINES=i).subst( + '$_CPPDEFFLAGS', + target=[ttt], + source=[sss], + ) + ) + for i in test_list: - env = Environment(CPPDEFPREFIX='|', CPPDEFSUFFIX='|', INTEGER=1, TESTDEFS=["FOO", "BAR=1"], GEN=generator) + env = Environment( + CPPDEFPREFIX='|', + CPPDEFSUFFIX='|', + INTEGER=1, + TESTDEFS=["FOO", "BAR=1"], + GEN=generator, + ) ttt = env.Entry('#ttt') sss = env.Entry('#sss') - print(env.Clone(CPPDEFINES=i).subst('$_CPPDEFFLAGS', target=[ttt], source=[sss])) + print( + env.Clone(CPPDEFINES=i).subst( + '$_CPPDEFFLAGS', + target=[ttt], + source=[sss], + ) + ) """) -expect = test.wrap_stdout(build_str="scons: `.' is up to date.\n", - read_str = """\ +expect = test.wrap_stdout( + build_str="scons: `.' 
is up to date.\n", + read_str="""\ -Dxyz -Dx -Dy -Dz -Dx -Dy=123 -Dz -Dint=0 --Da=1 -Db -Dc=3 +-Dc=3 -Db -Da=1 -DFOO -DBAR=1 -Dttt_GENERATED_sss |xyz| |x| |y| |z| |x| |y=123| |z| |int=1| -|a=1| |b| |c=3| +|c=3| |b| |a=1| |FOO| |BAR=1| |ttt_GENERATED_sss| -""") - +""", +) test.run(arguments = '.', stdout=expect) test.pass_test() diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/fixture/SConstruct-Append scons-4.5.2+dfsg/test/CPPDEFINES/fixture/SConstruct-Append --- scons-4.4.0+dfsg/test/CPPDEFINES/fixture/SConstruct-Append 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/fixture/SConstruct-Append 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,133 @@ +# SPDX-License-Identifier: MIT +# +# Copyright The SCons Foundation + +"""Append/AppendUnique tests""" + +DefaultEnvironment(tools=[]) + +# Special cases: +# https://github.com/SCons/scons/issues/1738 +env_1738_2 = Environment(CPPDEFPREFIX='-D') +env_1738_2['CPPDEFINES'] = ['FOO'] +env_1738_2.Append(CPPDEFINES={'value': '1'}) +print(env_1738_2.subst('$_CPPDEFFLAGS')) +# env_1738_2.Object('test_1738_2', 'main.c') + +# https://github.com/SCons/scons/issues/2300 +env_2300_1 = Environment(CPPDEFINES='foo', CPPDEFPREFIX='-D') +env_2300_1.Append(CPPDEFINES='bar') +print(env_2300_1.subst('$_CPPDEFFLAGS')) + +env_2300_2 = Environment(CPPDEFINES=['foo'], CPPDEFPREFIX='-D') # note the list +env_2300_2.Append(CPPDEFINES='bar') +print(env_2300_2.subst('$_CPPDEFFLAGS')) + +# An initial space-separated string will be split, but not a string in a list. +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo bar" +env_multi.Append(CPPDEFINES="baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = ["foo bar"] +env_multi.Append(CPPDEFINES="baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo" +env_multi.Append(CPPDEFINES=["bar baz"]) +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo" +env_multi.Append(CPPDEFINES="bar baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +# Check that AppendUnique(..., delete_existing=True) works as expected. +# Each addition is in different but matching form, and different order +# so we expect a reordered list, but with the same macro defines. +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi.Append(CPPDEFINES=["Macro1=Value1", ("Macro2", "Value2"), {"Macro3": "Value3"}, "Macro4"]) +try: + env_multi.AppendUnique(CPPDEFINES="Macro2=Value2", delete_existing=True) + env_multi.AppendUnique(CPPDEFINES=[("Macro4", None)], delete_existing=True) + env_multi.AppendUnique(CPPDEFINES=[("Macro3", "Value3")], delete_existing=True) + env_multi.AppendUnique(CPPDEFINES={"Macro1": "Value1"}, delete_existing=True) +except Exception as t: + print(f"Prepend FAILED: {t}") +else: + print(env_multi.subst('$_CPPDEFFLAGS')) + +# A lone tuple handled differently than a lone list. +env_multi = Environment(CPPDEFPREFIX='-D', CPPDEFINES=("Macro1", "Value1")) +print(env_multi.subst('$_CPPDEFFLAGS')) +env_multi = Environment(CPPDEFPREFIX='-D', CPPDEFINES=["Macro1", "Value1"]) +print(env_multi.subst('$_CPPDEFFLAGS')) + +# https://github.com/SCons/scons/issues/1152 +# https://github.com/SCons/scons/issues/2900 +# Python3 dicts dont preserve order. Hence we supply subclass of OrderedDict +# whose __str__ and __repr__ act like a normal dict. 
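The expected output above encodes the new Append/AppendUnique semantics for CPPDEFINES: values are normalized into one flat sequence of macros, Append concatenates, and AppendUnique drops an addition only when it would restate an identical macro/value pair. Two representative cases lifted from that output (CPPDEFPREFIX is set only to keep the flags platform-independent):

DefaultEnvironment(tools=[])

env = Environment(CPPDEFPREFIX='-D', CPPDEFINES=['FOO'])
env.Append(CPPDEFINES={'value': '1'})
print(env.subst('$_CPPDEFFLAGS'))    # -DFOO -Dvalue=1

env = Environment(CPPDEFPREFIX='-D', CPPDEFINES='NAME1=VAL1')
env.AppendUnique(CPPDEFINES=('NAME1', 'VAL1'))  # same macro, same value: dropped
print(env.subst('$_CPPDEFFLAGS'))    # still just -DNAME1=VAL1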
+from collections import OrderedDict + +class OrderedPrintingDict(OrderedDict): + def __repr__(self): + return '{' + ', '.join([f'{k!r}: {v!r}' for (k, v) in self.items()]) + '}' + + __str__ = __repr__ + + # Because dict-like objects (except dict and UserDict) are not deep copied + # directly when constructing Environment(CPPDEFINES=OrderedPrintingDict(...)) + def __semi_deepcopy__(self): + return self.copy() + + +# each of these types will be appended to each of the others +# the first item in each tuple is a label for the output +cases = [ + ('string', 'FOO'), + ('valuestring', 'NAME1=VAL1'), + ('list', ['NAME1', 'NAME2', 'NAME3']), + ('tuple', ('NAME1', 'VAL1')), + ('list-of-2lists', [('NAME1', 'VAL1'), ['NAME2', 'VAL2']]), + ( + 'dict', # intentionally not sorted by key + OrderedPrintingDict([('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')]), + ), +] + + +def dlist(coll): + # if it's a deque, turn it into a list for display purposes + from collections import deque + + if isinstance(coll, deque): + return list(coll) + return coll + + +for (t1, c1) in cases: + for (t2, c2) in cases: + print(f"==== Testing CPPDEFINES, appending a {t2} to a {t1}") + # string-like appearance if the value is a string + orig = f"{c1!r}" if isinstance(c1, str) else c1 + app = f"{c2!r}" if isinstance(c2, str) else c2 + print(f" orig = {orig}, append = {app}") + env = Environment(CPPDEFINES=c1, CPPDEFPREFIX='-D') + try: + env.Append(CPPDEFINES=c2) + final = env.subst('$_CPPDEFFLAGS', source="src", target="tgt") + print(f"Append:\n result={dlist(env['CPPDEFINES'])}\n final={final}") + except Exception as t: + print(f"Append:\n FAILED: {t}") + + env = Environment(CPPDEFINES=c1, CPPDEFPREFIX='-D') + try: + env.AppendUnique(CPPDEFINES=c2) + final = env.subst('$_CPPDEFFLAGS', source="src", target="tgt") + print( + f"AppendUnique:\n result={dlist(env['CPPDEFINES'])}\n final={final}" + ) + except Exception as t: + print(f"AppendUnique:\n FAILED: {t}") diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/fixture/SConstruct-Prepend scons-4.5.2+dfsg/test/CPPDEFINES/fixture/SConstruct-Prepend --- scons-4.4.0+dfsg/test/CPPDEFINES/fixture/SConstruct-Prepend 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/fixture/SConstruct-Prepend 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,134 @@ +# SPDX-License-Identifier: MIT +# +# Copyright The SCons Foundation + +"""Prepend/PrependUnique tests""" + +DefaultEnvironment(tools=[]) + +# Special cases: +# https://github.com/SCons/scons/issues/1738 +env_1738_2 = Environment(CPPDEFPREFIX='-D') +env_1738_2['CPPDEFINES'] = ['FOO'] +env_1738_2.Prepend(CPPDEFINES={'value': '1'}) +print(env_1738_2.subst('$_CPPDEFFLAGS')) +# env_1738_2.Object('test_1738_2', 'main.c') + +# https://github.com/SCons/scons/issues/2300 +env_2300_1 = Environment(CPPDEFINES='foo', CPPDEFPREFIX='-D') +env_2300_1.Prepend(CPPDEFINES='bar') +print(env_2300_1.subst('$_CPPDEFFLAGS')) + +env_2300_2 = Environment(CPPDEFINES=['foo'], CPPDEFPREFIX='-D') # note the list +env_2300_2.Prepend(CPPDEFINES='bar') +print(env_2300_2.subst('$_CPPDEFFLAGS')) + +# An initial space-separated string will be split, but not a string in a list. 
+env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo bar" +env_multi.Prepend(CPPDEFINES="baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = ["foo bar"] +env_multi.Prepend(CPPDEFINES="baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo" +env_multi.Prepend(CPPDEFINES=["bar baz"]) +print(env_multi.subst('$_CPPDEFFLAGS')) + +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi['CPPDEFINES'] = "foo" +env_multi.Prepend(CPPDEFINES="bar baz") +print(env_multi.subst('$_CPPDEFFLAGS')) + +# Check that PrependUnique(..., delete_existing=True) works as expected. +# Each addition is in different but matching form, and different order +# so we expect a reordered list, but with the same macro defines. +env_multi = Environment(CPPDEFPREFIX='-D') +env_multi.Prepend(CPPDEFINES=["Macro1=Value1", ("Macro2", "Value2"), {"Macro3": "Value3"}]) +try: + env_multi.PrependUnique(CPPDEFINES="Macro2=Value2", delete_existing=True) + env_multi.PrependUnique(CPPDEFINES=[("Macro4", None)], delete_existing=True) + env_multi.PrependUnique(CPPDEFINES=[("Macro3", "Value3")], delete_existing=True) + env_multi.PrependUnique(CPPDEFINES={"Macro1": "Value1"}, delete_existing=True) +except Exception as t: + print(f"Prepend FAILED: {t}") +else: + print(env_multi.subst('$_CPPDEFFLAGS')) + +# A lone tuple handled differently than a lone list. +env_tuple = Environment(CPPDEFPREFIX='-D', CPPDEFINES=("Macro1", "Value1")) +print(env_tuple.subst('$_CPPDEFFLAGS')) +env_multi = Environment(CPPDEFPREFIX='-D', CPPDEFINES=["Macro1", "Value1"]) +print(env_multi.subst('$_CPPDEFFLAGS')) + +# https://github.com/SCons/scons/issues/1152 +# https://github.com/SCons/scons/issues/2900 +# Python3 dicts dont preserve order. Hence we supply subclass of OrderedDict +# whose __str__ and __repr__ act like a normal dict. 
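The delete_existing=True branch exercised just above makes the *Unique methods replace rather than skip: an equivalent define already in CPPDEFINES is removed and the new spelling is inserted at the prepend (or append) position. A tiny sketch with made-up macro names; the expected flags are inferred from the matching Append case above:

DefaultEnvironment(tools=[])
env = Environment(CPPDEFPREFIX='-D')
env.Prepend(CPPDEFINES=['Macro1=Value1', ('Macro2', 'Value2')])
# same macro in a different spelling: the old entry is removed, the new one goes first
env.PrependUnique(CPPDEFINES={'Macro1': 'Value1'}, delete_existing=True)
print(env.subst('$_CPPDEFFLAGS'))  # expected: -DMacro1=Value1 -DMacro2=Value2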
+from collections import OrderedDict + + +class OrderedPrintingDict(OrderedDict): + def __repr__(self): + return '{' + ', '.join([f'{k!r}: {v!r}' for (k, v) in self.items()]) + '}' + + __str__ = __repr__ + + # Because dict-like objects (except dict and UserDict) are not deep copied + # directly when constructing Environment(CPPDEFINES=OrderedPrintingDict(...)) + def __semi_deepcopy__(self): + return self.copy() + + +# each of these types will be prepended to each of the others +# the first item in each tuple is a label for the output +cases = [ + ('string', 'FOO'), + ('valuestring', 'NAME1=VAL1'), + ('list', ['NAME1', 'NAME2', 'NAME3']), + ('tuple', ('NAME1', 'VAL1')), + ('list-of-2lists', [('NAME1', 'VAL1'), ['NAME2', 'VAL2']]), + ( + 'dict', # intentionally not sorted by key + OrderedPrintingDict([('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')]), + ), +] + + +def dlist(coll): + # if it's a deque, turn it into a list for display purposes + from collections import deque + + if isinstance(coll, deque): + return list(coll) + return coll + + +for (t1, c1) in cases: + for (t2, c2) in cases: + print(f"==== Testing CPPDEFINES, prepending a {t2} to a {t1}") + # string-like appearance if the value is a string + orig = f"{c1!r}" if isinstance(c1, str) else c1 + pre = f"{c2!r}" if isinstance(c2, str) else c2 + print(f" orig = {orig}, prepend = {pre}") + env = Environment(CPPDEFINES=c1, CPPDEFPREFIX='-D') + try: + env.Prepend(CPPDEFINES=c2) + final = env.subst('$_CPPDEFFLAGS', source="src", target="tgt") + print(f"Prepend:\n result={dlist(env['CPPDEFINES'])}\n final={final}") + except Exception as t: + print(f"Prepend:\n FAILED: {t}") + + env = Environment(CPPDEFINES=c1, CPPDEFPREFIX='-D') + try: + env.PrependUnique(CPPDEFINES=c2) + final = env.subst('$_CPPDEFFLAGS', source="src", target="tgt") + print( + f"PrependUnique:\n result={dlist(env['CPPDEFINES'])}\n final={final}" + ) + except Exception as t: + print(f"PrependUnique:\n FAILED: {t}") diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/live.py scons-4.5.2+dfsg/test/CPPDEFINES/live.py --- scons-4.4.0+dfsg/test/CPPDEFINES/live.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/live.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify basic use of CPPDEFINES with live compilation. 
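For orientation, live.py compiles real programs to confirm how each CPPDEFINES shape turns into -D flags: a plain list element, a tuple whose value is substituted from $VALUE, and a dict entry whose None value yields a bare define. Its three environments, condensed:

foo = Environment(CPPDEFINES=['FOO', ('VAL', '$VALUE')], VALUE=7)  # FOO, VAL=7
bar = Environment(CPPDEFINES={'BAR': None, 'VAL': 8})              # BAR, VAL=8
baz = Environment(CPPDEFINES=['BAZ', ('VAL', 9)])                  # BAZ, VAL=9
foo.Program(target='foo', source=foo.Object(target='foo', source='prog.c'))
bar.Program(target='bar', source=bar.Object(target='bar', source='prog.c'))
baz.Program(target='baz', source='baz.cpp')

The built programs then report "prog.c: FOO 7", "prog.c: BAR 8" and "baz.cpp: BAZ 9", which is exactly what the test asserts.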
@@ -33,14 +32,14 @@ test = TestSCons.TestSCons() test.write('SConstruct', """\ -foo = Environment(CPPDEFINES = ['FOO', ('VAL', '$VALUE')], VALUE=7) -bar = Environment(CPPDEFINES = {'BAR':None, 'VAL':8}) -baz = Environment(CPPDEFINES = ['BAZ', ('VAL', 9)]) -f = foo.Object(target = 'foo', source = 'prog.c') -b = bar.Object(target = 'bar', source = 'prog.c') -foo.Program(target = 'foo', source = f) -bar.Program(target = 'bar', source = b) -baz.Program(target = 'baz', source = 'baz.cpp') +foo = Environment(CPPDEFINES=['FOO', ('VAL', '$VALUE')], VALUE=7) +bar = Environment(CPPDEFINES={'BAR': None, 'VAL': 8}) +baz = Environment(CPPDEFINES=['BAZ', ('VAL', 9)]) +f = foo.Object(target='foo', source='prog.c') +b = bar.Object(target='bar', source='prog.c') +foo.Program(target='foo', source=f) +bar.Program(target='bar', source=b) +baz.Program(target='baz', source='baz.cpp') """) test.write('prog.c', r""" @@ -74,12 +73,10 @@ } """) - -test.run(arguments = '.') - -test.run(program = test.workpath('foo'), stdout = "prog.c: FOO 7\n") -test.run(program = test.workpath('bar'), stdout = "prog.c: BAR 8\n") -test.run(program = test.workpath('baz'), stdout = "baz.cpp: BAZ 9\n") +test.run(arguments='.') +test.run(program=test.workpath('foo'), stdout="prog.c: FOO 7\n") +test.run(program=test.workpath('bar'), stdout="prog.c: BAR 8\n") +test.run(program=test.workpath('baz'), stdout="baz.cpp: BAZ 9\n") test.pass_test() diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/pkg-config.py scons-4.5.2+dfsg/test/CPPDEFINES/pkg-config.py --- scons-4.4.0+dfsg/test/CPPDEFINES/pkg-config.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/pkg-config.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify merging with MergeFlags to CPPPDEFINES with various data types. @@ -69,12 +68,14 @@ test.write('SConstruct', """\ import os import sys + # Python3 dicts dont preserve order. Hence we supply subclass of OrderedDict # whose __str__ and __repr__ act like a normal dict. 
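# Illustrative sketch (editor's addition, not a patch hunk): the cases below
# merge parsed -D options into an existing CPPDEFINES value.  MergeFlags(),
# and ParseConfig() which feeds pkg-config output through the same parser,
# recognise -DNAME and -DNAME=VALUE and append them to CPPDEFINES whatever
# form it already has; Append(CPPDEFINES={...}) does the same for a dict.
# CPPDEFPREFIX='-D' is assumed; the literal flag string stands in for real
# pkg-config output.
env_merge = Environment(CPPDEFPREFIX='-D', CPPDEFINES=[('DEBUG', '1'), 'TEST'])
env_merge.MergeFlags('-DSOMETHING -DVARIABLE=2')   # adds SOMETHING, VARIABLE=2
env_merge.Append(CPPDEFINES={'value': '1'})        # the issue 1738 case
print(env_merge.subst('$_CPPDEFFLAGS'))  # ends with -DSOMETHING -DVARIABLE=2 -Dvalue=1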
from collections import OrderedDict + class OrderedPrintingDict(OrderedDict): def __repr__(self): - return '{' + ', '.join(['%r: %r'%(k, v) for (k, v) in self.items()]) + '}' + return '{' + ', '.join(['%r: %r' % (k, v) for (k, v) in self.items()]) + '}' __str__ = __repr__ @@ -85,29 +86,41 @@ """ + """ # https://github.com/SCons/scons/issues/2671 # Passing test cases -env_1 = Environment(CPPDEFINES=[('DEBUG','1'), 'TEST'], tools = ['%(pkg_config_tools)s']) +env_1 = Environment(CPPDEFINES=[('DEBUG', '1'), 'TEST'], tools=['%(pkg_config_tools)s']) if sys.platform == 'win32': - os.environ['PKG_CONFIG_PATH'] = env_1.Dir('.').abspath.replace("\\\\" , "/") -env_1.ParseConfig('%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags %(pkg_config_file)s') + os.environ['PKG_CONFIG_PATH'] = env_1.Dir('.').abspath.replace("\\\\", "/") +env_1.ParseConfig( + '%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags %(pkg_config_file)s' +) print(env_1.subst('$_CPPDEFFLAGS')) -env_2 = Environment(CPPDEFINES=[('DEBUG','1'), 'TEST'], tools = ['%(pkg_config_tools)s']) +env_2 = Environment(CPPDEFINES=[('DEBUG', '1'), 'TEST'], tools=['%(pkg_config_tools)s']) env_2.MergeFlags('-DSOMETHING -DVARIABLE=2') print(env_2.subst('$_CPPDEFFLAGS')) # Failing test cases -env_3 = Environment(CPPDEFINES=OrderedPrintingDict([('DEBUG', 1), ('TEST', None)]), tools = ['%(pkg_config_tools)s']) -env_3.ParseConfig('%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags %(pkg_config_file)s') +env_3 = Environment( + CPPDEFINES=OrderedPrintingDict([('DEBUG', 1), ('TEST', None)]), + tools=['%(pkg_config_tools)s'], +) +env_3.ParseConfig( + '%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags %(pkg_config_file)s' +) print(env_3.subst('$_CPPDEFFLAGS')) -env_4 = Environment(CPPDEFINES=OrderedPrintingDict([('DEBUG', 1), ('TEST', None)]), tools = ['%(pkg_config_tools)s']) +env_4 = Environment( + CPPDEFINES=OrderedPrintingDict([('DEBUG', 1), ('TEST', None)]), + tools=['%(pkg_config_tools)s'], +) env_4.MergeFlags('-DSOMETHING -DVARIABLE=2') print(env_4.subst('$_CPPDEFFLAGS')) # https://github.com/SCons/scons/issues/1738 -env_1738_1 = Environment(tools = ['%(pkg_config_tools)s']) -env_1738_1.ParseConfig('%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags --libs %(pkg_config_file)s') -env_1738_1.Append(CPPDEFINES={'value' : '1'}) +env_1738_1 = Environment(tools=['%(pkg_config_tools)s']) +env_1738_1.ParseConfig( + '%(pkg_config_cl_path)s "%(pkg_config_path)s" --cflags --libs %(pkg_config_file)s' +) +env_1738_1.Append(CPPDEFINES={'value': '1'}) print(env_1738_1.subst('$_CPPDEFFLAGS')) """%locals() ) @@ -119,11 +132,10 @@ -DSOMETHING -DVARIABLE=2 -Dvalue=1 """ -build_output="scons: `.' is up to date.\n" +build_output = "scons: `.' 
is up to date.\n" +expect = test.wrap_stdout(build_str=build_output, read_str=expect_print_output) +test.run(arguments='.', stdout=expect) -expect = test.wrap_stdout(build_str=build_output, - read_str = expect_print_output) -test.run(arguments = '.', stdout=expect) test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/prepend.py scons-4.5.2+dfsg/test/CPPDEFINES/prepend.py --- scons-4.4.0+dfsg/test/CPPDEFINES/prepend.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/prepend.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,349 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Verify prepending to CPPPDEFINES with various data types. +""" + +import TestSCons + +test = TestSCons.TestSCons() + +# Note: explicitly set CPPDEFPREFIX here to simplify testing on Windows. 
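# Illustrative sketch (editor's addition, not a patch hunk): CPPDEFPREFIX is
# the string glued onto each CPPDEFINES entry when $_CPPDEFFLAGS is expanded.
# gcc-style tools set it to '-D' while the MSVC tool uses '/D', which is why
# the fixture pins '-D': the expected output below then matches on every
# platform.
env_msvc_style = Environment(CPPDEFPREFIX='/D', CPPDEFINES=['FOO', ('VAL', 9)])
print(env_msvc_style.subst('$_CPPDEFFLAGS'))   # /DFOO /DVAL=9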
+ +# Link: fixture/SConstruct-Prepend +test.file_fixture(["fixture", "SConstruct-Prepend"], "SConstruct") + +expect_print_output="""\ +-Dvalue=1 -DFOO +-Dbar -Dfoo +-Dbar -Dfoo +-Dbaz -Dfoo -Dbar +-Dbaz -Dfoo bar +-Dbar baz -Dfoo +-Dbaz -Dbar -Dfoo +-DMacro1=Value1 -DMacro3=Value3 -DMacro4 -DMacro2=Value2 +-DMacro1=Value1 +-DMacro1 -DValue1 +==== Testing CPPDEFINES, prepending a string to a string + orig = 'FOO', prepend = 'FOO' +Prepend: + result=['FOO', 'FOO'] + final=-DFOO -DFOO +PrependUnique: + result=['FOO'] + final=-DFOO +==== Testing CPPDEFINES, prepending a valuestring to a string + orig = 'FOO', prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', 'FOO'] + final=-DNAME1=VAL1 -DFOO +PrependUnique: + result=['NAME1=VAL1', 'FOO'] + final=-DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, prepending a list to a string + orig = 'FOO', prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', 'FOO'] + final=-DNAME3 -DNAME2 -DNAME1 -DFOO +PrependUnique: + result=['NAME3', 'NAME2', 'NAME1', 'FOO'] + final=-DNAME3 -DNAME2 -DNAME1 -DFOO +==== Testing CPPDEFINES, prepending a tuple to a string + orig = 'FOO', prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), 'FOO'] + final=-DNAME1=VAL1 -DFOO +PrependUnique: + result=[('NAME1', 'VAL1'), 'FOO'] + final=-DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, prepending a list-of-2lists to a string + orig = 'FOO', prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), 'FOO'] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DFOO +PrependUnique: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), 'FOO'] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DFOO +==== Testing CPPDEFINES, prepending a dict to a string + orig = 'FOO', prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), 'FOO'] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DFOO +PrependUnique: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), 'FOO'] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DFOO +==== Testing CPPDEFINES, prepending a string to a valuestring + orig = 'NAME1=VAL1', prepend = 'FOO' +Prepend: + result=['FOO', 'NAME1=VAL1'] + final=-DFOO -DNAME1=VAL1 +PrependUnique: + result=['FOO', 'NAME1=VAL1'] + final=-DFOO -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a valuestring to a valuestring + orig = 'NAME1=VAL1', prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=['NAME1=VAL1'] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list to a valuestring + orig = 'NAME1=VAL1', prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', 'NAME1=VAL1'] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 +PrependUnique: + result=['NAME3', 'NAME2', 'NAME1', 'NAME1=VAL1'] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a tuple to a valuestring + orig = 'NAME1=VAL1', prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=['NAME1=VAL1'] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list-of-2lists to a valuestring + orig = 'NAME1=VAL1', prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), 'NAME1=VAL1'] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=[['NAME2', 'VAL2'], 'NAME1=VAL1'] + final=-DNAME2=VAL2 -DNAME1=VAL1 +==== Testing 
CPPDEFINES, prepending a dict to a valuestring + orig = 'NAME1=VAL1', prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), 'NAME1=VAL1'] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 +PrependUnique: + result=[('NAME3', None), ('NAME2', 'VAL2'), 'NAME1=VAL1'] + final=-DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a string to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = 'FOO' +Prepend: + result=['FOO', 'NAME1', 'NAME2', 'NAME3'] + final=-DFOO -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=['FOO', 'NAME1', 'NAME2', 'NAME3'] + final=-DFOO -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a valuestring to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=['NAME1=VAL1', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a list to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=['NAME1', 'NAME2', 'NAME3'] + final=-DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a tuple to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=[('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a list-of-2lists to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a dict to a list + orig = ['NAME1', 'NAME2', 'NAME3'], prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DNAME1 -DNAME2 -DNAME3 +PrependUnique: + result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), 'NAME1', 'NAME2', 'NAME3'] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME1 -DNAME2 -DNAME3 +==== Testing CPPDEFINES, prepending a string to a tuple + orig = ('NAME1', 'VAL1'), prepend = 'FOO' +Prepend: + result=['FOO', ('NAME1', 'VAL1')] + final=-DFOO -DNAME1=VAL1 +PrependUnique: + result=['FOO', ('NAME1', 'VAL1')] + final=-DFOO -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a valuestring to a tuple + orig = ('NAME1', 'VAL1'), prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=[('NAME1', 'VAL1')] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list to a tuple + orig = ('NAME1', 'VAL1'), prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', ('NAME1', 'VAL1')] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 +PrependUnique: + result=['NAME3', 'NAME2', 'NAME1', ('NAME1', 'VAL1')] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a tuple to 
a tuple + orig = ('NAME1', 'VAL1'), prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=[('NAME1', 'VAL1')] + final=-DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list-of-2lists to a tuple + orig = ('NAME1', 'VAL1'), prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME1=VAL1 +PrependUnique: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a dict to a tuple + orig = ('NAME1', 'VAL1'), prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 +PrependUnique: + result=[('NAME3', None), ('NAME2', 'VAL2'), ('NAME1', 'VAL1')] + final=-DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a string to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = 'FOO' +Prepend: + result=['FOO', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=['FOO', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DFOO -DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a valuestring to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a list to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=['NAME3', 'NAME2', 'NAME1', ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a tuple to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a list-of-2lists to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=[('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a dict to a list-of-2lists + orig = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']], prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DNAME1=VAL1 -DNAME2=VAL2 +PrependUnique: + result=[('NAME3', None), ('NAME1', 'VAL1'), ['NAME2', 'VAL2']] + final=-DNAME3 -DNAME1=VAL1 -DNAME2=VAL2 +==== Testing CPPDEFINES, prepending a string to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = 'FOO' 
+Prepend: + result=['FOO', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=['FOO', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DFOO -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a valuestring to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = 'NAME1=VAL1' +Prepend: + result=['NAME1=VAL1', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = ['NAME1', 'NAME2', 'NAME3'] +Prepend: + result=['NAME3', 'NAME2', 'NAME1', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME3 -DNAME2 -DNAME1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=['NAME2', 'NAME1', ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2 -DNAME1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a tuple to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = ('NAME1', 'VAL1') +Prepend: + result=[('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a list-of-2lists to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = [('NAME1', 'VAL1'), ['NAME2', 'VAL2']] +Prepend: + result=[['NAME2', 'VAL2'], ('NAME1', 'VAL1'), ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME1=VAL1 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +==== Testing CPPDEFINES, prepending a dict to a dict + orig = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'}, prepend = {'NAME2': 'VAL2', 'NAME3': None, 'NAME1': 'VAL1'} +Prepend: + result=[('NAME1', 'VAL1'), ('NAME3', None), ('NAME2', 'VAL2'), ('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME1=VAL1 -DNAME3 -DNAME2=VAL2 -DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +PrependUnique: + result=[('NAME2', 'VAL2'), ('NAME3', None), ('NAME1', 'VAL1')] + final=-DNAME2=VAL2 -DNAME3 -DNAME1=VAL1 +""" + +build_output="scons: `.' 
is up to date.\n" +expect = test.wrap_stdout(build_str=build_output, read_str=expect_print_output) +test.run(arguments='.', stdout=expect) +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/scan.py scons-4.5.2+dfsg/test/CPPDEFINES/scan.py --- scons-4.4.0+dfsg/test/CPPDEFINES/scan.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/scan.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify that use of the Scanner that evaluates CPP lines works as expected. @@ -41,12 +40,12 @@ f4_exe = 'f4' + TestSCons._exe test.write('SConstruct', """\ -env = Environment(CPPPATH = ['.']) +env = Environment(CPPPATH=['.']) -f1 = env.Object('f1', 'fff.c', CPPDEFINES = ['F1']) -f2 = env.Object('f2', 'fff.c', CPPDEFINES = [('F2', 1)]) -f3 = env.Object('f3', 'fff.c', CPPDEFINES = {'F3':None}) -f4 = env.Object('f4', 'fff.c', CPPDEFINES = {'F4':1}) +f1 = env.Object('f1', 'fff.c', CPPDEFINES=['F1']) +f2 = env.Object('f2', 'fff.c', CPPDEFINES=[('F2', 1)]) +f3 = env.Object('f3', 'fff.c', CPPDEFINES={'F3': None}) +f4 = env.Object('f4', 'fff.c', CPPDEFINES={'F4': 1}) env.Program('f1', ['prog.c', f1]) env.Program('f2', ['prog.c', f2]) @@ -110,20 +109,16 @@ test.run(arguments = '.') - test.run(program = test.workpath('f1'), stdout = "prog.c: F1\n") test.run(program = test.workpath('f2'), stdout = "prog.c: F2\n") test.run(program = test.workpath('f3'), stdout = "prog.c: F3\n") test.run(program = test.workpath('f4'), stdout = "prog.c: F4\n") - - test.write('f1.h', """ #define STRING "F1 again" """) test.up_to_date(arguments = '%(f2_exe)s %(f3_exe)s %(f4_exe)s' % locals()) - test.not_up_to_date(arguments = '.') test.run(program = test.workpath('f1'), stdout = "prog.c: F1 again\n") @@ -131,14 +126,11 @@ test.run(program = test.workpath('f3'), stdout = "prog.c: F3\n") test.run(program = test.workpath('f4'), stdout = "prog.c: F4\n") - - test.write('f2.h', """ #define STRING "F2 again" """) test.up_to_date(arguments = '%(f1_exe)s %(f3_exe)s %(f4_exe)s' % locals()) - test.not_up_to_date(arguments = '.') test.run(program = test.workpath('f1'), stdout = "prog.c: F1 again\n") @@ -146,14 +138,11 @@ test.run(program = test.workpath('f3'), stdout = "prog.c: F3\n") test.run(program = test.workpath('f4'), stdout = "prog.c: F4\n") - - test.write('f3.h', """ #define STRING "F3 again" """) test.up_to_date(arguments = '%(f1_exe)s %(f2_exe)s %(f4_exe)s' % locals()) - test.not_up_to_date(arguments = '.') test.run(program = test.workpath('f1'), stdout = "prog.c: F1 again\n") @@ -161,14 +150,11 @@ test.run(program = test.workpath('f3'), stdout = "prog.c: F3 again\n") test.run(program = test.workpath('f4'), stdout = "prog.c: F4\n") - - test.write('f4.h', """ #define STRING "F4 again" """) test.up_to_date(arguments = '%(f1_exe)s %(f2_exe)s %(f3_exe)s' % locals()) - test.not_up_to_date(arguments = '.') test.run(program = test.workpath('f1'), stdout = "prog.c: F1 
again\n") @@ -176,8 +162,6 @@ test.run(program = test.workpath('f3'), stdout = "prog.c: F3 again\n") test.run(program = test.workpath('f4'), stdout = "prog.c: F4 again\n") - - test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/CPPDEFINES/undefined.py scons-4.5.2+dfsg/test/CPPDEFINES/undefined.py --- scons-4.4.0+dfsg/test/CPPDEFINES/undefined.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPDEFINES/undefined.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify that $_CPPDEFFLAGS doesn't barf when CPPDEFINES isn't defined. @@ -37,10 +36,8 @@ print(env.subst('$_CPPDEFFLAGS')) """) -expect = test.wrap_stdout(build_str="scons: `.' is up to date.\n", - read_str = "\n") - -test.run(arguments = '.', stdout=expect) +expect = test.wrap_stdout(build_str="scons: `.' is up to date.\n", read_str="\n") +test.run(arguments='.', stdout=expect) test.pass_test() diff -Nru scons-4.4.0+dfsg/test/CPPPATH/CPPPATH.py scons-4.5.2+dfsg/test/CPPPATH/CPPPATH.py --- scons-4.4.0+dfsg/test/CPPPATH/CPPPATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/CPPPATH/CPPPATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os.path @@ -45,6 +44,7 @@ 'inc2') test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(CPPPATH = ['$FOO', '${TARGET.dir}', '${SOURCE.dir}'], FOO='include') obj = env.Object(target='foobar/prog', source='subdir/prog.c') @@ -246,6 +246,7 @@ # Change CPPPATH and make sure we don't rebuild because of it. test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(CPPPATH = Split('inc2 include ${TARGET.dir} ${SOURCE.dir}')) obj = env.Object(target='foobar/prog', source='subdir/prog.c') env.Program(target='prog', source=obj) diff -Nru scons-4.4.0+dfsg/test/Default.py scons-4.5.2+dfsg/test/Default.py --- scons-4.4.0+dfsg/test/Default.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Default.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify various combinations of arguments to Default() work properly. @@ -50,6 +49,7 @@ # test.write(['one', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') env.B(target = 'bar.out', source = 'bar.in') @@ -58,6 +58,7 @@ test.write(['two', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') env.B(target = 'bar.out', source = 'bar.in') @@ -66,6 +67,7 @@ test.write(['three', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'foo.out', source = 'foo.in') env.B(target = 'bar.out', source = 'bar.in') @@ -74,6 +76,7 @@ test.write(['four', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = ['foo bar'], source = 'foo.in') env.B(target = 'foo', source = 'foo.in') @@ -83,6 +86,7 @@ test.write(['five', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) Default(env.B(target = 'foo.out', source = 'foo.in')) Default(env.B(target = 'bar.out', source = 'bar.in')) @@ -112,6 +116,7 @@ # Test how a None Default() argument works to disable/reset default targets. test.write(['six', 'SConstruct'], """\ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) foo = env.B(target = 'foo.out', source = 'foo.in') bar = env.B(target = 'bar.out', source = 'bar.in') @@ -123,6 +128,7 @@ test.write(['seven', 'SConstruct'], """\ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) foo = env.B(target = 'foo.out', source = 'foo.in') bar = env.B(target = 'bar.out', source = 'bar.in') @@ -134,6 +140,7 @@ test.write(['eight', 'SConstruct'], """\ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) foo = env.B(target = 'foo.out', source = 'foo.in') bar = env.B(target = 'bar.out', source = 'bar.in') @@ -150,6 +157,7 @@ test.write(['nine', 'SConstruct'], """\ B = Builder(action = r'%(_python_)s build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'xxx.out', source = 'xxx.in') SConscript('sub1/SConscript') @@ -159,6 +167,7 @@ test.write(['nine', 'sub1', 'SConscript'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'xxx.out', source = 'xxx.in') Default('xxx.out') @@ -177,6 +186,7 @@ test.write(['ten', 'SConstruct'], """\ Default('sub2') B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'xxx.out', source = 'xxx.in') 
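# Illustrative sketch (editor's addition, not a patch hunk) of the two pieces
# these hunks exercise: DefaultEnvironment(tools=[]) keeps SCons from probing
# the machine for every default tool (the tests only need their own Builder,
# hence the recurring "# test speedup" comments), and Default() selects which
# targets get built when none are named on the command line.  The copy action
# below is a stand-in for the tests' build.py helper.
def copy_action(target, source, env):
    with open(str(target[0]), 'wb') as t, open(str(source[0]), 'rb') as s:
        t.write(s.read())

DefaultEnvironment(tools=[])   # test speedup
env = Environment(tools=[], BUILDERS={'B': Builder(action=copy_action)})
foo = env.B(target='foo.out', source='foo.in')
bar = env.B(target='bar.out', source='bar.in')
Default(foo)      # a bare "scons" now builds only foo.out
# Default(None)   # would clear the list again (the None case described above)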
SConscript('sub2/SConscript') @@ -186,6 +196,7 @@ test.write(['ten', 'sub2', 'SConscript'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) env.B(target = 'xxx.out', source = 'xxx.in') """ % locals()) @@ -202,6 +213,7 @@ test.write(['eleven', 'SConstruct'], """ B = Builder(action = r'%(_python_)s ../build.py $TARGET $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }, XXX = 'foo.out') env.B(target = 'foo.out', source = 'foo.in') env.B(target = 'bar.out', source = 'bar.in') diff -Nru scons-4.4.0+dfsg/test/Dir/PyPackageDir/image/SConstruct scons-4.5.2+dfsg/test/Dir/PyPackageDir/image/SConstruct --- scons-4.4.0+dfsg/test/Dir/PyPackageDir/image/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Dir/PyPackageDir/image/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -1,31 +1,31 @@ -import sys, os - -oldsyspath = sys.path -dir_path = Dir('.').srcnode().abspath -dir_path = os.path.join(dir_path, 'syspath') -sys.path.append(dir_path) - -def TestPyPackageDir(env, modname): - packagepath = env.PyPackageDir(modname).abspath - # Convert from an absolute path back to a relative one for testing - commonprefix = os.path.commonprefix([dir_path, packagepath]) - relpath = os.path.relpath(packagepath, commonprefix) - relpath = relpath.replace(os.sep, '/') - print(relpath) - -DefaultEnvironment(tools=[]) - -print("Test identification of directory for a given python package") -env = Environment(tools=[]) -TestPyPackageDir(env, 'testmod1') -TestPyPackageDir(env, 'testmod2') -TestPyPackageDir(env, 'submod1.testmod3') -TestPyPackageDir(env, 'submod1.submod2.testmod4') - -print("Test parameter substitution") -env = Environment(tools=[], FOO = 'submod1.submod2.testmod4') -TestPyPackageDir(env, '${FOO}') -env = Environment(tools=[], FOO = 'submod1.submod2', BAR = 'testmod4') -TestPyPackageDir(env, '${FOO}.${BAR}') - -sys.path = oldsyspath +import sys, os + +oldsyspath = sys.path +dir_path = Dir('.').srcnode().abspath +dir_path = os.path.join(dir_path, 'syspath') +sys.path.append(dir_path) + +def TestPyPackageDir(env, modname): + packagepath = env.PyPackageDir(modname).abspath + # Convert from an absolute path back to a relative one for testing + commonprefix = os.path.commonprefix([dir_path, packagepath]) + relpath = os.path.relpath(packagepath, commonprefix) + relpath = relpath.replace(os.sep, '/') + print(relpath) + +DefaultEnvironment(tools=[]) + +print("Test identification of directory for a given python package") +env = Environment(tools=[]) +TestPyPackageDir(env, 'testmod1') +TestPyPackageDir(env, 'testmod2') +TestPyPackageDir(env, 'submod1.testmod3') +TestPyPackageDir(env, 'submod1.submod2.testmod4') + +print("Test parameter substitution") +env = Environment(tools=[], FOO = 'submod1.submod2.testmod4') +TestPyPackageDir(env, '${FOO}') +env = Environment(tools=[], FOO = 'submod1.submod2', BAR = 'testmod4') +TestPyPackageDir(env, '${FOO}.${BAR}') + +sys.path = oldsyspath diff -Nru scons-4.4.0+dfsg/test/Dir/PyPackageDir/PyPackageDir.py scons-4.5.2+dfsg/test/Dir/PyPackageDir/PyPackageDir.py --- scons-4.4.0+dfsg/test/Dir/PyPackageDir/PyPackageDir.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Dir/PyPackageDir/PyPackageDir.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,54 +1,54 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person 
obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -import TestSCons - -test = TestSCons.TestSCons() - -test.dir_fixture('image') - -test.run(arguments = '.', stdout = """\ -scons: Reading SConscript files ... -Test identification of directory for a given python package -testmod1 -. -submod1 -submod1/submod2 -Test parameter substitution -submod1/submod2 -submod1/submod2 -scons: done reading SConscript files. -scons: Building targets ... -scons: `.' is up to date. -scons: done building targets. -""") - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import TestSCons + +test = TestSCons.TestSCons() + +test.dir_fixture('image') + +test.run(arguments = '.', stdout = """\ +scons: Reading SConscript files ... +Test identification of directory for a given python package +testmod1 +. +submod1 +submod1/submod2 +Test parameter substitution +submod1/submod2 +submod1/submod2 +scons: done reading SConscript files. +scons: Building targets ... +scons: `.' is up to date. +scons: done building targets. 
+""") + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/diskcheck.py scons-4.5.2+dfsg/test/diskcheck.py --- scons-4.4.0+dfsg/test/diskcheck.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/diskcheck.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,7 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test that the --diskcheck option and SetOption('diskcheck') correctly @@ -41,16 +41,23 @@ test.write('SConstruct', """ -SetOption('diskcheck', 'none') + +if GetOption('diskcheck') == ['match'] or ARGUMENTS.get('setoption_none',0): + SetOption('diskcheck', 'none') File('subdir') """) -test.run() +test.run(status=2, stderr=None) +test.must_contain_all_lines(test.stderr(), ["found where file expected"]) test.run(arguments='--diskcheck=match', status=2, stderr=None) test.must_contain_all_lines(test.stderr(), ["found where file expected"]) +# Test that setting --diskcheck to none via command line also works. +test.run(arguments='--diskcheck=none') +# Test that SetOption('diskcheck','none') works to override default as well +test.run(arguments='setoption_none=1') test.pass_test() diff -Nru scons-4.4.0+dfsg/test/fixture/SConstruct-check-valid-options scons-4.5.2+dfsg/test/fixture/SConstruct-check-valid-options --- scons-4.4.0+dfsg/test/fixture/SConstruct-check-valid-options 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/fixture/SConstruct-check-valid-options 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,21 @@ +import sys +from SCons.Script.SConsOptions import SConsOptionParser, SConsBadOptionError + +AddOption( + '--testing', + help='Test arg', +) + +if ARGUMENTS.get('raise', 0) == '1': + ValidateOptions(throw_exception=True) +elif ARGUMENTS.get('raise', 0) == '2': + try: + ValidateOptions(throw_exception=True) + except SConsBadOptionError as e: + print("Parser is SConsOptionParser:%s" % (isinstance(e.parser, SConsOptionParser))) + print("Message is :%s" % e.opt_str) + Exit(3) +else: + ValidateOptions() + +print("This is in SConstruct") diff -Nru scons-4.4.0+dfsg/test/Fortran/F77PATH.py scons-4.5.2+dfsg/test/Fortran/F77PATH.py --- scons-4.4.0+dfsg/test/Fortran/F77PATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/F77PATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -46,6 +46,7 @@ test.subdir('include', 'subdir', ['subdir', 'include'], 'foobar', 'inc2') test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment( F77='%s', F77PATH=['$FOO', '${TARGET.dir}', '${SOURCE.dir}'], @@ -226,6 +227,7 @@ # Change F77PATH and make sure we don't rebuild because of it. test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment( F77='%s', F77PATH=Split('inc2 include ${TARGET.dir} ${SOURCE.dir}'), @@ -285,6 +287,7 @@ # Check that a null-string F77PATH doesn't blow up. 
test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools=['%s'], F77PATH='', F77FLAGS='-x f77') env.Object('foo', source='empty.f77') """ % fc) diff -Nru scons-4.4.0+dfsg/test/Fortran/F90PATH.py scons-4.5.2+dfsg/test/Fortran/F90PATH.py --- scons-4.4.0+dfsg/test/Fortran/F90PATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/F90PATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -46,6 +46,7 @@ test.subdir('include', 'subdir', ['subdir', 'include'], 'foobar', 'inc2') test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment( F90=r'%s', F90PATH=['$FOO', '${TARGET.dir}', '${SOURCE.dir}'], @@ -228,6 +229,7 @@ # Change F90PATH and make sure we don't rebuild because of it. test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment( F90=r'%s', F90PATH=Split('inc2 include ${TARGET.dir} ${SOURCE.dir}'), diff -Nru scons-4.4.0+dfsg/test/Fortran/F95FLAGS.py scons-4.5.2+dfsg/test/Fortran/F95FLAGS.py --- scons-4.4.0+dfsg/test/Fortran/F95FLAGS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/F95FLAGS.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons @@ -31,26 +30,30 @@ test = TestSCons.TestSCons() _exe = TestSCons._exe +# ref: test/fixture/mylink.py test.file_fixture('mylink.py') +# ref: test/Fortran/fixture/myfortran_flags.py test.file_fixture(['fixture', 'myfortran_flags.py']) test.write('SConstruct', """ -env = Environment(LINK = r'%(_python_)s mylink.py', - LINKFLAGS = [], - F95 = r'%(_python_)s myfortran_flags.py g95', - F95FLAGS = '-x', - FORTRAN = r'%(_python_)s myfortran_flags.py fortran', - FORTRANFLAGS = '-y') -env.Program(target = 'test01', source = 'test01.f') -env.Program(target = 'test02', source = 'test02.F') -env.Program(target = 'test03', source = 'test03.for') -env.Program(target = 'test04', source = 'test04.FOR') -env.Program(target = 'test05', source = 'test05.ftn') -env.Program(target = 'test06', source = 'test06.FTN') -env.Program(target = 'test07', source = 'test07.fpp') -env.Program(target = 'test08', source = 'test08.FPP') -env.Program(target = 'test13', source = 'test13.f95') -env.Program(target = 'test14', source = 'test14.F95') +env = Environment( + LINK=r'%(_python_)s mylink.py', + LINKFLAGS=[], + F95=r'%(_python_)s myfortran_flags.py g95', + F95FLAGS='-x', + FORTRAN=r'%(_python_)s myfortran_flags.py fortran', + FORTRANFLAGS='-y', +) +env.Program(target='test01', source='test01.f') +env.Program(target='test02', source='test02.F') +env.Program(target='test03', source='test03.for') +env.Program(target='test04', source='test04.FOR') +env.Program(target='test05', source='test05.ftn') +env.Program(target='test06', source='test06.FTN') +env.Program(target='test07', source='test07.fpp') +env.Program(target='test08', source='test08.FPP') +env.Program(target='test13', source='test13.f95') +env.Program(target='test14', source='test14.F95') """ % locals()) test.write('test01.f', "This is a .f 
file.\n#link\n#fortran\n") @@ -80,24 +83,22 @@ fc = 'f95' g95 = test.detect_tool(fc) - - if g95: test.subdir('x') - test.write(['x','dummy.i'], """ # Exists only such that -Ix finds the directory... """) + # ref: test/fixture/wrapper.py test.file_fixture('wrapper.py') test.write('SConstruct', """ -foo = Environment(F95 = '%(fc)s') +foo = Environment(F95='%(fc)s') f95 = foo.Dictionary('F95') -bar = foo.Clone(F95 = r'%(_python_)s wrapper.py ' + f95, F95FLAGS = '-Ix') -foo.Program(target = 'foo', source = 'foo.f95') -bar.Program(target = 'bar', source = 'bar.f95') +bar = foo.Clone(F95=r'%(_python_)s wrapper.py ' + f95, F95FLAGS='-Ix') +foo.Program(target='foo', source='foo.f95') +bar.Program(target='bar', source='bar.f95') """ % locals()) test.write('foo.f95', r""" @@ -114,21 +115,18 @@ END """) - - test.run(arguments = 'foo' + _exe, stderr = None) - - test.run(program = test.workpath('foo'), stdout = " foo.f95\n") - + test.run(arguments='foo' + _exe, stderr=None) + test.run(program=test.workpath('foo'), stdout=" foo.f95\n") test.must_not_exist('wrapper.out') import sys - if sys.platform[:5] == 'sunos': - test.run(arguments = 'bar' + _exe, stderr = None) - else: - test.run(arguments = 'bar' + _exe) - test.run(program = test.workpath('bar'), stdout = " bar.f95\n") + if sys.platform.startswith('sunos'): + test.run(arguments='bar' + _exe, stderr=None) + else: + test.run(arguments='bar' + _exe) + test.run(program=test.workpath('bar'), stdout=" bar.f95\n") test.must_match('wrapper.out', "wrapper.py\n") test.pass_test() diff -Nru scons-4.4.0+dfsg/test/Fortran/FORTRANPATH.py scons-4.5.2+dfsg/test/Fortran/FORTRANPATH.py --- scons-4.4.0+dfsg/test/Fortran/FORTRANPATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/FORTRANPATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,5 @@ #!/usr/bin/env python +# # MIT License # # Copyright The SCons Foundation @@ -45,6 +46,7 @@ test.subdir('include', 'subdir', ['subdir', 'include'], 'foobar', 'inc2') test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment( FORTRAN='%s', FORTRANPATH=['$FOO', '${TARGET.dir}', '${SOURCE.dir}'], @@ -241,6 +243,7 @@ # Change FORTRANPATH and make sure we don't rebuild because of it. test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment( FORTRAN='%s', FORTRANPATH=Split('inc2 include ${TARGET.dir} ${SOURCE.dir}'), @@ -304,6 +307,7 @@ # Check that a null-string FORTRANPATH doesn't blow up. test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(FORTRANPATH = '') env.Object('foo', source = 'empty.f') """) diff -Nru scons-4.4.0+dfsg/test/Fortran/link-with-cxx.py scons-4.5.2+dfsg/test/Fortran/link-with-cxx.py --- scons-4.4.0+dfsg/test/Fortran/link-with-cxx.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/link-with-cxx.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify the smart_link() warning messages used when attempting to link @@ -41,6 +40,7 @@ test.write('test_linker.py', """\ import sys + if sys.argv[1] == '-o': with open(sys.argv[2], 'wb') as ofp: for infile in sys.argv[3:]: @@ -54,9 +54,9 @@ sys.exit(0) """) - test.write('test_fortran.py', """\ import sys + with open(sys.argv[2], 'wb') as ofp: for infile in sys.argv[4:]: with open(infile, 'rb') as ifp: @@ -64,35 +64,35 @@ sys.exit(0) """) - test.write('SConstruct', """ import SCons.Tool.link + def copier(target, source, env): s = str(source[0]) t = str(target[0]) with open(t, 'wb') as ofp, open(s, 'rb') as ifp: ofp.write(ifp.read()) -env = Environment(CXX = r'%(_python_)s test_linker.py', - CXXCOM = Action(copier), - SMARTLINK = SCons.Tool.link.smart_link, - LINK = r'$SMARTLINK', - LINKFLAGS = '', - # We want to re-define this as follows (so as to - # not rely on a real Fortran compiler) but can't - # because $FORTRANCOM is defined with an extra space - # so it ends up as a CommandAction, not a LazyAction. - # Must look into changing that after 1.0 is out. - #FORTRANCOM = Action(copier)) - FORTRAN = r'%(_python_)s test_fortran.py') +env = Environment( + CXX=r'%(_python_)s test_linker.py', + CXXCOM=Action(copier), + SMARTLINK=SCons.Tool.link.smart_link, + LINK=r'$SMARTLINK', + LINKFLAGS='', + # We want to re-define this as follows (so as to + # not rely on a real Fortran compiler) but can't + # because $FORTRANCOM is defined with an extra space + # so it ends up as a CommandAction, not a LazyAction. + # Must look into changing that after 1.0 is out. + # FORTRANCOM = Action(copier)) + FORTRAN=r'%(_python_)s test_fortran.py', +) env.Program('prog1.exe', ['f1.cpp', 'f2.f']) env.Program('prog2.exe', ['f1.cpp', 'f2.f']) if ARGUMENTS.get('NO_LINK'): - # Can remove no-deprecated when we drop Python1.5 - SetOption('warn', ['no-link', 'no-deprecated']) + SetOption('warn', ['no-link']) if ARGUMENTS.get('NO_MIX'): - # Can remove no-deprecated when we drop Python1.5 - SetOption('warn', ['no-fortran-cxx-mix', 'no-deprecated']) + SetOption('warn', ['no-fortran-cxx-mix']) """ % locals()) test.write('f1.cpp', "f1.cpp\n") @@ -100,52 +100,43 @@ expect = (""" scons: warning: Using \\$CXX to link Fortran and C\\+\\+ code together. -\tThis may generate a buggy executable if the '%s test_linker.py' -\tcompiler does not know how to deal with Fortran runtimes. + This may generate a buggy executable if the '%s test_linker.py' + compiler does not know how to deal with Fortran runtimes. 
""" % re.escape(_python_)) + TestSCons.file_expr test.run(arguments = '.', stderr=expect) - test.must_match('prog1.exe', "f1.cpp\nf2.f\n") test.must_match('prog2.exe', "f1.cpp\nf2.f\n") test.run(arguments = '-c .', stderr=expect) - test.must_not_exist('prog1.exe') test.must_not_exist('prog2.exe') test.run(arguments = '--warning=no-link .') - test.must_match('prog1.exe', "f1.cpp\nf2.f\n") test.must_match('prog2.exe', "f1.cpp\nf2.f\n") test.run(arguments = '-c .', stderr=expect) - test.must_not_exist('prog1.exe') test.must_not_exist('prog2.exe') test.run(arguments = '--warning=no-fortran-cxx-mix .') - test.must_match('prog1.exe', "f1.cpp\nf2.f\n") test.must_match('prog2.exe', "f1.cpp\nf2.f\n") test.run(arguments = '-c .', stderr=expect) - test.must_not_exist('prog1.exe') test.must_not_exist('prog2.exe') test.run(arguments = 'NO_LINK=1 .') - test.must_match('prog1.exe', "f1.cpp\nf2.f\n") test.must_match('prog2.exe', "f1.cpp\nf2.f\n") test.run(arguments = '-c .', stderr=expect) - test.must_not_exist('prog1.exe') test.must_not_exist('prog2.exe') test.run(arguments = 'NO_MIX=1 .') - test.must_match('prog1.exe', "f1.cpp\nf2.f\n") test.must_match('prog2.exe', "f1.cpp\nf2.f\n") diff -Nru scons-4.4.0+dfsg/test/Fortran/SHF95FLAGS.py scons-4.5.2+dfsg/test/Fortran/SHF95FLAGS.py --- scons-4.4.0+dfsg/test/Fortran/SHF95FLAGS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Fortran/SHF95FLAGS.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons @@ -32,23 +31,25 @@ obj_ = TestSCons.shobj_ test = TestSCons.TestSCons() +# ref: test/Fortran/fixture/myfortran_flags.py test.file_fixture(['fixture', 'myfortran_flags.py']) test.write('SConstruct', """ -env = Environment(SHF95 = r'%(_python_)s myfortran_flags.py g95', - SHFORTRAN = r'%(_python_)s myfortran_flags.py fortran') -env.Append(SHF95FLAGS = '-x', - SHFORTRANFLAGS = '-y') -env.SharedObject(target = 'test01', source = 'test01.f') -env.SharedObject(target = 'test02', source = 'test02.F') -env.SharedObject(target = 'test03', source = 'test03.for') -env.SharedObject(target = 'test04', source = 'test04.FOR') -env.SharedObject(target = 'test05', source = 'test05.ftn') -env.SharedObject(target = 'test06', source = 'test06.FTN') -env.SharedObject(target = 'test07', source = 'test07.fpp') -env.SharedObject(target = 'test08', source = 'test08.FPP') -env.SharedObject(target = 'test13', source = 'test13.f95') -env.SharedObject(target = 'test14', source = 'test14.F95') +env = Environment( + SHF95=r'%(_python_)s myfortran_flags.py g95', + SHFORTRAN=r'%(_python_)s myfortran_flags.py fortran', +) +env.Append(SHF95FLAGS='-x', SHFORTRANFLAGS='-y') +env.SharedObject(target='test01', source='test01.f') +env.SharedObject(target='test02', source='test02.F') +env.SharedObject(target='test03', source='test03.for') +env.SharedObject(target='test04', source='test04.FOR') +env.SharedObject(target='test05', source='test05.ftn') +env.SharedObject(target='test06', source='test06.FTN') +env.SharedObject(target='test07', source='test07.fpp') +env.SharedObject(target='test08', source='test08.FPP') +env.SharedObject(target='test13', source='test13.f95') +env.SharedObject(target='test14', source='test14.F95') """ % locals()) test.write('test01.f', "This is a .f file.\n#fortran\n") @@ -62,8 +63,7 @@ test.write('test13.f95', "This is a .f95 file.\n#g95\n") test.write('test14.F95', "This is a .F95 file.\n#g95\n") -test.run(arguments = '.', stderr = None) - +test.run(arguments='.', stderr=None) test.must_match(obj_ + 'test01' + _obj, " -c -y\nThis is a .f file.\n") test.must_match(obj_ + 'test02' + _obj, " -c -y\nThis is a .F file.\n") test.must_match(obj_ + 'test03' + _obj, " -c -y\nThis is a .for file.\n") @@ -75,29 +75,24 @@ test.must_match(obj_ + 'test13' + _obj, " -c -x\nThis is a .f95 file.\n") test.must_match(obj_ + 'test14' + _obj, " -c -x\nThis is a .F95 file.\n") - - fc = 'f95' g95 = test.detect_tool(fc) - if g95: - test.subdir('x') - test.write(['x','dummy.i'], """ # Exists only such that -Ix finds the directory... 
""") + # ref: test/fixture/wrapper.py test.file_fixture('wrapper.py') - test.write('SConstruct', """ -foo = Environment(SHF95 = '%(fc)s') +foo = Environment(SHF95='%(fc)s') shf95 = foo.Dictionary('SHF95') -bar = foo.Clone(SHF95 = r'%(_python_)s wrapper.py ' + shf95) -bar.Append(SHF95FLAGS = '-Ix') -foo.SharedLibrary(target = 'foo/foo', source = 'foo.f95') -bar.SharedLibrary(target = 'bar/bar', source = 'bar.f95') +bar = foo.Clone(SHF95=r'%(_python_)s wrapper.py ' + shf95) +bar.Append(SHF95FLAGS='-Ix') +foo.SharedLibrary(target='foo/foo', source='foo.f95') +bar.SharedLibrary(target='bar/bar', source='bar.f95') """ % locals()) test.write('foo.f95', r""" @@ -114,17 +109,15 @@ END """) - - test.run(arguments = 'foo', stderr = None) - + test.run(arguments='foo', stderr=None) test.must_not_exist('wrapper.out') import sys - if sys.platform[:5] == 'sunos': - test.run(arguments = 'bar', stderr = None) - else: - test.run(arguments = 'bar') + if sys.platform.startswith('sunos'): + test.run(arguments='bar', stderr=None) + else: + test.run(arguments='bar') test.must_match('wrapper.out', "wrapper.py\n") test.pass_test() diff -Nru scons-4.4.0+dfsg/test/import.py scons-4.5.2+dfsg/test/import.py --- scons-4.4.0+dfsg/test/import.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/import.py 2023-03-21 16:17:04.000000000 +0000 @@ -128,23 +128,26 @@ import os.path qtdir = os.path.dirname(os.path.dirname(moc)) - - qt_err = r""" -scons: warning: Could not detect qt, using moc executable as a hint \(QTDIR=%(qtdir)s\) -""" % locals() - + qt3_err = fr""" +scons: warning: Could not detect qt3, using moc executable as a hint \(QT3DIR={qtdir}\) +""" else: + qt3_err = r""" +scons: warning: Could not detect qt3, using empty QT3DIR +""" - qt_err = """ -scons: warning: Could not detect qt, using empty QTDIR +qt_moved = r""" +scons: \*\*\* Deprecated tool 'qt' renamed to 'qt3'. Please update your build accordingly. 'qt3' will be removed entirely in a future release. 
""" -qt_warnings = [ re.compile(qt_err + TestSCons.file_expr) ] +qt3_warnings = [re.compile(qt3_err + TestSCons.file_expr)] +qt_error = [re.compile(qt_moved + TestSCons.file_expr)] error_output = { - 'icl' : intel_warnings, - 'intelc' : intel_warnings, - 'qt' : qt_warnings, + 'icl': intel_warnings, + 'intelc': intel_warnings, + 'qt3': qt3_warnings, + 'qt': qt_error, } # An SConstruct for importing Tool names that have illegal characters @@ -178,16 +181,16 @@ test.write('SConstruct', indirect_import % locals()) else: test.write('SConstruct', direct_import % locals()) - test.run(stderr=None) + test.run(stderr=None, status=None) stderr = test.stderr() - if stderr: + if stderr or test.status: matched = None for expression in error_output.get(tool, []): if expression.match(stderr): matched = 1 break if not matched: - print("Failed importing '%s', stderr:" % tool) + print(f"Failed importing '{tool}', stderr:") print(stderr) failures.append(tool) diff -Nru scons-4.4.0+dfsg/test/Interactive/taskmastertrace.py scons-4.5.2+dfsg/test/Interactive/taskmastertrace.py --- scons-4.4.0+dfsg/test/Interactive/taskmastertrace.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Interactive/taskmastertrace.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,8 +22,7 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + """ Verify use of the --taskmastertrace= option to the "build" command @@ -42,7 +43,6 @@ test.write('foo.in', "foo.in 1\n") - scons = test.start(arguments = '-Q --interactive') scons.send("build foo.out 1\n") @@ -101,7 +101,6 @@ Taskmaster: Looking for a node to evaluate Taskmaster: No candidate anymore. - scons>>> Touch("2") scons>>> scons: `foo.out' is up to date. scons>>> @@ -109,8 +108,6 @@ test.finish(scons, stdout = expect_stdout) - - test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/Java/inner-cacheable-live.py scons-4.5.2+dfsg/test/Java/inner-cacheable-live.py --- scons-4.4.0+dfsg/test/Java/inner-cacheable-live.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Java/inner-cacheable-live.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,77 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test Java inner classes can be cached. Requires a working JDK. + +Regression test: one iteration of CacheDir left it unable to deal +with class names from the emitter which contained an embedded '$'. +Led to error like: + +SyntaxError `invalid syntax (, line 1)' trying to evaluate `$Inner.class' +""" + +import TestSCons + +test = TestSCons.TestSCons() +where_javac, java_version = test.java_where_javac() + +# Work around javac 1.4 not reporting its version: +java_version = java_version or "1.4" + +# Skip this test as SCons doesn't (currently) predict the generated +# inner/anonymous class generated .class files generated by gcj +# and so will always fail. +if test.javac_is_gcj: + test.skip_test('Test not valid for gcj (gnu java); skipping test(s).\n') + +test.write( + 'SConstruct', + """ +DefaultEnvironment(tools=[]) +env = Environment() +env.CacheDir("cache") +env.Java("classes", "source") +""", +) + +test.subdir('source') + +test.write( + ['source', 'Test.java'], + """\ +class Test { class Inner {} } +""", +) + +test.run(arguments='.') + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/Java/JAVAPROCESSORPATH.py scons-4.5.2+dfsg/test/Java/JAVAPROCESSORPATH.py --- scons-4.4.0+dfsg/test/Java/JAVAPROCESSORPATH.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/Java/JAVAPROCESSORPATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,95 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Verify that use of $JAVAPROCESSORPATH sets the -processorpath option +on javac compilations. 
+""" + +import os + +import TestSCons + +test = TestSCons.TestSCons() + +where_javac, java_version = test.java_where_javac() + +test.write('SConstruct', """ +DefaultEnvironment(tools=[]) +env = Environment(tools=['javac'], JAVAPROCESSORPATH=['dir1', 'dir2']) +j1 = env.Java(target='class', source='com/Example1.java') +j2 = env.Java(target='class', source='com/Example2.java') +""") + +test.subdir('com') + +test.write(['com', 'Example1.java'], """\ +package com; + +public class Example1 +{ + + public static void main(String[] args) + { + + } + +} +""") + +test.write(['com', 'Example2.java'], """\ +package com; + +public class Example2 +{ + + public static void main(String[] args) + { + + } + +} +""") + +# Setting -processorpath messes with the Java runtime environment, so +# we'll just take the easy way out and examine the -n output to see if +# the expected option shows up on the command line. + +processorpath = os.pathsep.join(['dir1', 'dir2']) + +expect = """\ +javac -processorpath %(processorpath)s -d class -sourcepath com com.Example1\\.java +javac -processorpath %(processorpath)s -d class -sourcepath com com.Example2\\.java +""" % locals() + +test.run(arguments = '-Q -n .', stdout = expect, match=TestSCons.match_re) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/Libs/LIBPATH.py scons-4.5.2+dfsg/test/Libs/LIBPATH.py --- scons-4.4.0+dfsg/test/Libs/LIBPATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Libs/LIBPATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -23,7 +23,6 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - import os.path import time @@ -41,6 +40,7 @@ prog2 = test.workpath(dll_ + 'shlib') + _dll test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env1 = Environment(LIBS=['foo1'], LIBPATH=['$FOO'], FOO='./lib1') f1 = env1.SharedObject('f1', 'f1.c') @@ -118,6 +118,7 @@ #test.up_to_date(arguments = '.') # Change LIBPATH and make sure we don't rebuild because of it. test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env1 = Environment(LIBS=['foo1'], LIBPATH=['./lib1', './lib2']) f1 = env1.SharedObject('f1', 'f1.c') @@ -153,6 +154,7 @@ # Check that a null-string LIBPATH doesn't blow up. 
test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment(LIBPATH='') env.Library('foo', source='empty.c') """) diff -Nru scons-4.4.0+dfsg/test/Libs/LIBS.py scons-4.5.2+dfsg/test/Libs/LIBS.py --- scons-4.4.0+dfsg/test/Libs/LIBS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Libs/LIBS.py 2023-03-21 16:17:04.000000000 +0000 @@ -51,6 +51,7 @@ slprog_exe = test.workpath('slprog' + _exe) test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(LIBS=['bar'], LIBPATH = '.') env.Program(target='foo1', source='foo1.c') env2 = Environment(LIBS=[File(r'%s')], LIBPATH = '.') @@ -161,6 +162,7 @@ # test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(LIBS=['baz']) env.Program(target='foo1', source='foo1.c', LIBS=['$LIBS', 'bar'], LIBPATH = '.') SConscript('sub1/SConscript', 'env') @@ -173,6 +175,7 @@ # test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(LIBS=['bar', 'baz'], LIBPATH = '.') env.Program(target='foo1', source='foo1.c') SConscript('sub1/SConscript', 'env') @@ -191,6 +194,7 @@ # test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Program(target='foo1', source='foo1.c', LIBS=['bar', 'baz'], LIBPATH = '.') SConscript('sub1/SConscript', 'env') diff -Nru scons-4.4.0+dfsg/test/Libs/SharedLibraryIxes.py scons-4.5.2+dfsg/test/Libs/SharedLibraryIxes.py --- scons-4.4.0+dfsg/test/Libs/SharedLibraryIxes.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Libs/SharedLibraryIxes.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test that we can build shared libraries and link against shared @@ -50,6 +49,7 @@ isWindows = False isMingw = True +DefaultEnvironment(tools=[]) # test speedup env = Environment() # Make sure that the shared library can be located at runtime. diff -Nru scons-4.4.0+dfsg/test/MSVC/embed-manifest.py scons-4.5.2+dfsg/test/MSVC/embed-manifest.py --- scons-4.4.0+dfsg/test/MSVC/embed-manifest.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/MSVC/embed-manifest.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,94 +1,94 @@ -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -""" -Verify that manifest files get embedded correctly in EXEs and DLLs -""" - -import TestSCons - -_exe = TestSCons._exe -_dll = TestSCons._dll -_lib = TestSCons._lib - -test = TestSCons.TestSCons() - -test.skip_if_not_msvc() - -test.write('SConstruct', """\ -env=Environment(WINDOWS_EMBED_MANIFEST=True) -env.Append(CCFLAGS = '/MD') -env.Append(LINKFLAGS = '/MANIFEST') -env.Append(SHLINKFLAGS = '/MANIFEST') -exe=env.Program('test.cpp') -dll=env.SharedLibrary('testdll.cpp') -env.Command('exe-extracted.manifest', exe, - '$MT /nologo -inputresource:${SOURCE};1 -out:${TARGET}') -env.Command('dll-extracted.manifest', dll, - '$MT /nologo -inputresource:${SOURCE};2 -out:${TARGET}') -env2=Environment(WINDOWS_EMBED_MANIFEST=True) # no /MD here -env2.Program('test-nomanifest', env2.Object('test-nomanifest', 'test.cpp')) -""") - -test.write('test.cpp', """\ -#include -#include -int -main(int argc, char *argv) -{ - printf("test.cpp\\n"); - exit (0); -} -""") - -test.write('testdll.cpp', """\ -#include -#include - -__declspec(dllexport) int -testdll(int argc, char *argv) -{ - printf("testdll.cpp\\n"); - return 0; -} -""") - -test.run(arguments='.') - -test.must_exist('test%s' % _exe) -test.must_exist('test%s.manifest' % _exe) -test.must_contain('exe-extracted.manifest', '', mode='r') -test.must_exist('testdll%s' % _dll) -test.must_exist('testdll%s.manifest' % _dll) -test.must_contain('dll-extracted.manifest', '', mode='r') - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: +#!/usr/bin/env python +# +# __COPYRIGHT__ +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + +""" +Verify that manifest files get embedded correctly in EXEs and DLLs +""" + +import TestSCons + +_exe = TestSCons._exe +_dll = TestSCons._dll +_lib = TestSCons._lib + +test = TestSCons.TestSCons() + +test.skip_if_not_msvc() + +test.write('SConstruct', """\ +env=Environment(WINDOWS_EMBED_MANIFEST=True) +env.Append(CCFLAGS = '/MD') +env.Append(LINKFLAGS = '/MANIFEST') +env.Append(SHLINKFLAGS = '/MANIFEST') +exe=env.Program('test.cpp') +dll=env.SharedLibrary('testdll.cpp') +env.Command('exe-extracted.manifest', exe, + '$MT /nologo -inputresource:${SOURCE};1 -out:${TARGET}') +env.Command('dll-extracted.manifest', dll, + '$MT /nologo -inputresource:${SOURCE};2 -out:${TARGET}') +env2=Environment(WINDOWS_EMBED_MANIFEST=True) # no /MD here +env2.Program('test-nomanifest', env2.Object('test-nomanifest', 'test.cpp')) +""") + +test.write('test.cpp', """\ +#include +#include +int +main(int argc, char *argv) +{ + printf("test.cpp\\n"); + exit (0); +} +""") + +test.write('testdll.cpp', """\ +#include +#include + +__declspec(dllexport) int +testdll(int argc, char *argv) +{ + printf("testdll.cpp\\n"); + return 0; +} +""") + +test.run(arguments='.') + +test.must_exist('test%s' % _exe) +test.must_exist('test%s.manifest' % _exe) +test.must_contain('exe-extracted.manifest', '', mode='r') +test.must_exist('testdll%s' % _dll) +test.must_exist('testdll%s.manifest' % _dll) +test.must_contain('dll-extracted.manifest', '', mode='r') + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/MSVC/PCH-source.py scons-4.5.2+dfsg/test/MSVC/PCH-source.py --- scons-4.4.0+dfsg/test/MSVC/PCH-source.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/MSVC/PCH-source.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,102 +1,102 @@ -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -""" -Test use of pre-compiled headers when the source .cpp file shows -up in both the env.PCH() and the env.Program() source list. 
- -Issue 2505: http://github.com/SCons/scons/issues/2505 -""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.skip_if_not_msvc() - -test.write('SConstruct', """\ -env = Environment(tools=['msvc', 'mslink']) -env['PCH'] = env.PCH('Source1.cpp')[0] -env['PCHSTOP'] = 'Header1.hpp' -env.Program('foo', ['foo.cpp', 'Source2.cpp', 'Source1.cpp']) -""" % locals()) - -test.write('Header1.hpp', r""" -""") - -test.write('Source1.cpp', r""" -#include - -#include "Header1.hpp" - -void -Source1(void) { - printf("Source1.cpp\n"); -} -""") - -test.write('Source2.cpp', r""" -#include - -#include "Header1.hpp" - -void -Source2(void) { - printf("Source2.cpp\n"); -} -""") - -test.write('foo.cpp', r""" -#include - -#include "Header1.hpp" - -void Source1(void); -void Source2(void); - -int -main(int argc, char *argv[]) -{ - Source1(); - Source2(); - printf("foo.cpp\n"); -} -""") - -test.run(arguments = ".") - -test.run(program=test.workpath('foo'+TestSCons._exe), - stdout="Source1.cpp\nSource2.cpp\nfoo.cpp\n") - - - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: +#!/usr/bin/env python +# +# __COPYRIGHT__ +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + +""" +Test use of pre-compiled headers when the source .cpp file shows +up in both the env.PCH() and the env.Program() source list. 
+ +Issue 2505: http://github.com/SCons/scons/issues/2505 +""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.skip_if_not_msvc() + +test.write('SConstruct', """\ +env = Environment(tools=['msvc', 'mslink']) +env['PCH'] = env.PCH('Source1.cpp')[0] +env['PCHSTOP'] = 'Header1.hpp' +env.Program('foo', ['foo.cpp', 'Source2.cpp', 'Source1.cpp']) +""" % locals()) + +test.write('Header1.hpp', r""" +""") + +test.write('Source1.cpp', r""" +#include + +#include "Header1.hpp" + +void +Source1(void) { + printf("Source1.cpp\n"); +} +""") + +test.write('Source2.cpp', r""" +#include + +#include "Header1.hpp" + +void +Source2(void) { + printf("Source2.cpp\n"); +} +""") + +test.write('foo.cpp', r""" +#include + +#include "Header1.hpp" + +void Source1(void); +void Source2(void); + +int +main(int argc, char *argv[]) +{ + Source1(); + Source2(); + printf("foo.cpp\n"); +} +""") + +test.run(arguments = ".") + +test.run(program=test.workpath('foo'+TestSCons._exe), + stdout="Source1.cpp\nSource2.cpp\nfoo.cpp\n") + + + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/MSVS/CPPPATH-Dirs.py scons-4.5.2+dfsg/test/MSVS/CPPPATH-Dirs.py --- scons-4.4.0+dfsg/test/MSVS/CPPPATH-Dirs.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/MSVS/CPPPATH-Dirs.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,94 +1,94 @@ - -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -""" -Test that MSVS generation works when CPPPATH contains Dir nodes. -Also make sure changing CPPPATH causes rebuild. 
-""" - -import os -import sys - -import TestSConsMSVS - -test = TestSConsMSVS.TestSConsMSVS() - -if sys.platform != 'win32': - msg = "Skipping Visual Studio test on non-Windows platform '%s'\n" % sys.platform - test.skip_test(msg) - -import SCons.Tool.MSCommon as msc -if not msc.msvs_exists(): - msg = "No MSVS toolchain found...skipping test\n" - test.skip_test(msg) - -SConscript_contents = """\ -env = Environment() - -sources = ['main.cpp'] - -program = env.Program(target = 'hello', source = sources) - -if ARGUMENTS.get('moreincludes'): - env.AppendUnique(CPPPATH = [env.Dir('.'), env.Dir('myincludes')]) -else: - env.AppendUnique(CPPPATH = [env.Dir('.')]) - -env.MSVSProject(target = 'Hello' + env['MSVSPROJECTSUFFIX'], - srcs = sources, - buildtarget = program, - variant = 'Release') -""" - -test.write('SConstruct', SConscript_contents) - -test.write('main.cpp', """\ -#include -int main(void) { - printf("hello, world!\\n"); -} -""") - -test.run() - -if not os.path.exists(test.workpath('Hello.vcproj')) and \ - not os.path.exists(test.workpath('Hello.vcxproj')): - test.fail_test("Failed to create Visual Studio project Hello.vcproj or Hello.vcxproj") -test.must_exist(test.workpath('Hello.sln')) -# vcproj = test.read('Test.vcproj', 'r') - -test.run(arguments='moreincludes=1') -test.must_not_contain_any_line(test.stdout(), ['is up to date']) -test.must_contain_all_lines(test.stdout(), ['Adding', 'Hello']) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: + +#!/usr/bin/env python +# +# __COPYRIGHT__ +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + +""" +Test that MSVS generation works when CPPPATH contains Dir nodes. +Also make sure changing CPPPATH causes rebuild. 
+""" + +import os +import sys + +import TestSConsMSVS + +test = TestSConsMSVS.TestSConsMSVS() + +if sys.platform != 'win32': + msg = "Skipping Visual Studio test on non-Windows platform '%s'\n" % sys.platform + test.skip_test(msg) + +import SCons.Tool.MSCommon as msc +if not msc.msvs_exists(): + msg = "No MSVS toolchain found...skipping test\n" + test.skip_test(msg) + +SConscript_contents = """\ +env = Environment() + +sources = ['main.cpp'] + +program = env.Program(target = 'hello', source = sources) + +if ARGUMENTS.get('moreincludes'): + env.AppendUnique(CPPPATH = [env.Dir('.'), env.Dir('myincludes')]) +else: + env.AppendUnique(CPPPATH = [env.Dir('.')]) + +env.MSVSProject(target = 'Hello' + env['MSVSPROJECTSUFFIX'], + srcs = sources, + buildtarget = program, + variant = 'Release') +""" + +test.write('SConstruct', SConscript_contents) + +test.write('main.cpp', """\ +#include +int main(void) { + printf("hello, world!\\n"); +} +""") + +test.run() + +if not os.path.exists(test.workpath('Hello.vcproj')) and \ + not os.path.exists(test.workpath('Hello.vcxproj')): + test.fail_test("Failed to create Visual Studio project Hello.vcproj or Hello.vcxproj") +test.must_exist(test.workpath('Hello.sln')) +# vcproj = test.read('Test.vcproj', 'r') + +test.run(arguments='moreincludes=1') +test.must_not_contain_any_line(test.stdout(), ['is up to date']) +test.must_contain_all_lines(test.stdout(), ['Adding', 'Hello']) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/ninja/build_libraries.py scons-4.5.2+dfsg/test/ninja/build_libraries.py --- scons-4.4.0+dfsg/test/ninja/build_libraries.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/ninja/build_libraries.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,5 +1,7 @@ #!/usr/bin/env python # +# MIT License +# # Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining @@ -34,10 +36,6 @@ except ImportError: test.skip_test("Could not find ninja module. Skipping test.\n") -ninja_binary = test.where_is('ninja') -if not ninja_binary: - test.skip_test("Could not find ninja executable. 
Skipping test.\n") - ninja_bin = os.path.abspath(os.path.join( ninja.__file__, os.pardir, diff -Nru scons-4.4.0+dfsg/test/ninja/shell_command.py scons-4.5.2+dfsg/test/ninja/shell_command.py --- scons-4.4.0+dfsg/test/ninja/shell_command.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/ninja/shell_command.py 2023-03-21 16:17:04.000000000 +0000 @@ -53,6 +53,12 @@ DefaultEnvironment(tools=[]) env = Environment() + +# Added to verify that SCons Ninja tool is sanitizing the shell environment +# before it spawns a new shell +env['ENV']['ZPATH']=['/a/b/c','/c/d/e'] + + env.Tool('ninja') prog = env.Program(target = 'foo', source = 'foo.c') env.Command('foo.out', prog, '%(shell)sfoo%(_exe)s > foo.out') diff -Nru scons-4.4.0+dfsg/test/NoClean.py scons-4.5.2+dfsg/test/NoClean.py --- scons-4.4.0+dfsg/test/NoClean.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/NoClean.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,97 +1,97 @@ -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -# -# This test ensures that NoClean works correctly, even when it's applied to -# a single target in the return list of an multi-target Builder. 
-# -import TestSCons - -test = TestSCons.TestSCons() - -test.write('SConstruct', """ -def action(target, source, env): - for t in target: - with open(t.get_internal_path(), 'w'): - pass -Command('1.out', 'SConstruct', action) -NoClean('1.out') -""") - -test.write('SConstruct.force', """ -def action(target, source, env): - for t in target: - with open(t.get_internal_path(), 'w'): - pass - with open('4.out', 'w'): - pass -res = Command('3.out', 'SConstruct.force', action) -Clean('4.out', res) -NoClean('4.out') -""") - -test.write('SConstruct.multi', """ -def action(target, source, env): - for t in target: - with open(t.get_internal_path(), 'w'): - pass -Command(['5.out', '6.out'], 'SConstruct.multi', action) -NoClean('6.out') -""") - -# -# Basic check: NoClean keeps files -# -test.run() -test.run(arguments='-c') - -test.must_exist('1.out') - -# -# Check: NoClean overrides Clean -# -test.run(arguments=['-f', 'SConstruct.force']) -test.run(arguments=['-f', 'SConstruct.force', '-c']) - -test.must_not_exist('3.out') -test.must_exist('4.out') - -# -# Check: NoClean works for multi-target Builders -# -test.run(arguments=['-f', 'SConstruct.multi']) -test.run(arguments=['-f', 'SConstruct.multi', '-c']) - -test.must_not_exist('5.out') -test.must_exist('6.out') - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: +#!/usr/bin/env python +# +# __COPYRIGHT__ +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + +# +# This test ensures that NoClean works correctly, even when it's applied to +# a single target in the return list of an multi-target Builder. 
+# +import TestSCons + +test = TestSCons.TestSCons() + +test.write('SConstruct', """ +def action(target, source, env): + for t in target: + with open(t.get_internal_path(), 'w'): + pass +Command('1.out', 'SConstruct', action) +NoClean('1.out') +""") + +test.write('SConstruct.force', """ +def action(target, source, env): + for t in target: + with open(t.get_internal_path(), 'w'): + pass + with open('4.out', 'w'): + pass +res = Command('3.out', 'SConstruct.force', action) +Clean('4.out', res) +NoClean('4.out') +""") + +test.write('SConstruct.multi', """ +def action(target, source, env): + for t in target: + with open(t.get_internal_path(), 'w'): + pass +Command(['5.out', '6.out'], 'SConstruct.multi', action) +NoClean('6.out') +""") + +# +# Basic check: NoClean keeps files +# +test.run() +test.run(arguments='-c') + +test.must_exist('1.out') + +# +# Check: NoClean overrides Clean +# +test.run(arguments=['-f', 'SConstruct.force']) +test.run(arguments=['-f', 'SConstruct.force', '-c']) + +test.must_not_exist('3.out') +test.must_exist('4.out') + +# +# Check: NoClean works for multi-target Builders +# +test.run(arguments=['-f', 'SConstruct.multi']) +test.run(arguments=['-f', 'SConstruct.multi', '-c']) + +test.must_not_exist('5.out') +test.must_exist('6.out') + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/NodeOps.py scons-4.5.2+dfsg/test/NodeOps.py --- scons-4.4.0+dfsg/test/NodeOps.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/NodeOps.py 2023-03-21 16:17:04.000000000 +0000 @@ -50,6 +50,7 @@ test.subdir('bld', 'src', ['src', 'subsrcdir']) sconstruct = r""" +DefaultEnvironment(tools=[]) # test speedup foo = Environment(SHOBJPREFIX='', WINDOWS_INSERT_DEF=1) foo.Append(SHCXXFLAGS = '-DFOO') bar = Environment(SHOBJPREFIX='', WINDOWS_INSERT_DEF=1) diff -Nru scons-4.4.0+dfsg/test/no-global-dependencies.py scons-4.5.2+dfsg/test/no-global-dependencies.py --- scons-4.4.0+dfsg/test/no-global-dependencies.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/no-global-dependencies.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test that files are correctly located in the variant directory even when diff -Nru scons-4.4.0+dfsg/test/option/fixture/SConstruct__taskmastertrace scons-4.5.2+dfsg/test/option/fixture/SConstruct__taskmastertrace --- scons-4.4.0+dfsg/test/option/fixture/SConstruct__taskmastertrace 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/fixture/SConstruct__taskmastertrace 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,9 @@ +DefaultEnvironment(tools=[]) +env = Environment(tools=[]) + +# We name the files 'Tfile' so that they will sort after the SConstruct +# file regardless of whether the test is being run on a case-sensitive +# or case-insensitive system. 
+ +env.Command('Tfile.out', 'Tfile.mid', Copy('$TARGET', '$SOURCE')) +env.Command('Tfile.mid', 'Tfile.in', Copy('$TARGET', '$SOURCE')) diff -Nru scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_file_1.txt scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_file_1.txt --- scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_file_1.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_file_1.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,74 @@ + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: adjusted ref count: , child 'SConstruct' +Taskmaster: adjusted ref count: , child 'Tfile.in' +Taskmaster: adjusted ref count: , child 'Tfile.mid' +Taskmaster: adjusted ref count: , child 'Tfile.out' +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node + +Taskmaster: Looking for a node to evaluate +Taskmaster: No candidate anymore. 
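The two fixture files added above (SConstruct__taskmastertrace and taskmaster_expected_file_1.txt) exist so that test/option/taskmastertrace.py no longer embeds the SConstruct and the expected trace as inline strings; the rewritten test, shown further down in this diff, consumes them roughly as follows. This is a condensed sketch of those hunks, not a standalone script (TestSCons comes from SCons' bundled testing framework):

    import TestSCons

    test = TestSCons.TestSCons()
    # Pull the fixtures into the test work directory.
    test.file_fixture('fixture/SConstruct__taskmastertrace', 'SConstruct')
    test.file_fixture('fixture/taskmaster_expected_file_1.txt', 'taskmaster_expected_file_1.txt')
    test.write('Tfile.in', "Tfile.in\n")

    # Trace to a file instead of stdout, then compare against the fixture.
    test.run(arguments='--taskmastertrace=trace.out .')
    test.must_match_file('trace.out', 'taskmaster_expected_file_1.txt', mode='r')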
diff -Nru scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_new_parallel.txt scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_new_parallel.txt --- scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_new_parallel.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_new_parallel.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,90 @@ +Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access +Job.NewParallel._work(): [Thread:XXXXX] Starting search +Job.NewParallel._work(): [Thread:XXXXX] Found {len(results_queue)} completed tasks to process +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: adjusted ref count: , child 'SConstruct' +Taskmaster: adjusted ref count: , child 'Tfile.in' +Taskmaster: adjusted ref count: , child 'Tfile.mid' +Taskmaster: adjusted ref count: , child 'Tfile.out' +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Job.NewParallel._work(): [Thread:XXXXX] Found internal task +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Job.NewParallel._work(): [Thread:XXXXX] Found internal task +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Job.NewParallel._work(): [Thread:XXXXX] Found internal task +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Job.NewParallel._work(): [Thread:XXXXX] Found internal task +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Job.NewParallel._work(): [Thread:XXXXX] Found internal task +Task.executed_with_callbacks(): node +Task.postprocess(): node +Job.NewParallel._work(): [Thread:XXXXX] Searching for new tasks + +Taskmaster: Looking for a node to evaluate +Taskmaster: No candidate anymore. 
+Job.NewParallel._work(): [Thread:XXXXX] Found no task requiring execution, and have no jobs: marking complete +Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access +Job.NewParallel._work(): [Thread:XXXXX] Completion detected, breaking from main loop +Job.NewParallel._work(): [Thread:XXXXX] Gained exclusive access +Job.NewParallel._work(): [Thread:XXXXX] Completion detected, breaking from main loop diff -Nru scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_stdout_1.txt scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_stdout_1.txt --- scons-4.4.0+dfsg/test/option/fixture/taskmaster_expected_stdout_1.txt 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/fixture/taskmaster_expected_stdout_1.txt 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,76 @@ + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: adjusted ref count: , child 'SConstruct' +Taskmaster: adjusted ref count: , child 'Tfile.in' +Taskmaster: adjusted ref count: , child 'Tfile.mid' +Taskmaster: adjusted ref count: , child 'Tfile.out' +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Copy("Tfile.mid", "Tfile.in") +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Copy("Tfile.out", "Tfile.mid") +Task.executed_with_callbacks(): node +Task.postprocess(): node +Task.postprocess(): removing +Task.postprocess(): adjusted parent ref count + +Taskmaster: Looking for a node to evaluate +Taskmaster: Considering node and its children: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: +Taskmaster: Evaluating + +Task.make_ready_current(): node +Task.prepare(): node +Task.execute(): node +Task.executed_with_callbacks(): node +Task.postprocess(): node + +Taskmaster: Looking for a node to evaluate +Taskmaster: No candidate anymore. 
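The taskmaster_expected_new_parallel.txt fixture above contains Job.NewParallel._work() lines whose thread ids differ on every run, so the expected text carries the placeholder [Thread:XXXXX]; the rewritten test/option/taskmastertrace.py (see its hunks below) masks the real ids with a regex before comparing. A condensed sketch of that part of the hunk:

    import re

    # Run the build with the experimental NewParallel taskmaster and trace to a file.
    test.run(arguments='-j 2 --experimental=tm_v2 --taskmastertrace=new_parallel_trace.out .')

    # Thread ids vary per run; normalize them to the fixture's placeholder.
    new_trace = test.read('new_parallel_trace.out', mode='r')
    thread_id = re.compile(r'\[Thread:\d+\]')
    new_trace = thread_id.sub('[Thread:XXXXX]', new_trace)
    test.must_match('taskmaster_expected_new_parallel.txt', new_trace, mode='r')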
diff -Nru scons-4.4.0+dfsg/test/option/option--experimental.py scons-4.5.2+dfsg/test/option/option--experimental.py --- scons-4.4.0+dfsg/test/option/option--experimental.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/option--experimental.py 2023-03-21 16:17:04.000000000 +0000 @@ -36,12 +36,13 @@ tests = [ ('.', []), ('--experimental=ninja', ['ninja']), - ('--experimental=all', ['ninja', 'transporter', 'warp_speed']), + ('--experimental=tm_v2', ['tm_v2']), + ('--experimental=all', ['ninja', 'tm_v2', 'transporter', 'warp_speed']), ('--experimental=none', []), ] for args, exper in tests: - read_string = """All Features=ninja,transporter,warp_speed + read_string = """All Features=ninja,tm_v2,transporter,warp_speed Experimental=%s """ % (exper) test.run(arguments=args, @@ -50,7 +51,7 @@ test.run(arguments='--experimental=warp_drive', stderr="""usage: scons [OPTIONS] [VARIABLES] [TARGETS] -SCons Error: option --experimental: invalid choice: 'warp_drive' (choose from 'all','none','ninja','transporter','warp_speed') +SCons Error: option --experimental: invalid choice: 'warp_drive' (choose from 'all','none','ninja','tm_v2','transporter','warp_speed') """, status=2) diff -Nru scons-4.4.0+dfsg/test/option/option-n.py scons-4.5.2+dfsg/test/option/option-n.py --- scons-4.4.0+dfsg/test/option/option-n.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/option-n.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -58,8 +60,8 @@ """) test.write('SConstruct', """ -DefaultEnvironment(tools=[]) MyBuild = Builder(action=r'%(_python_)s build.py $TARGETS') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS={'MyBuild': MyBuild}, tools=[]) env.Tool('install') env.MyBuild(target='f1.out', source='f1.in') @@ -172,7 +174,6 @@ test.set_match_function(TestSCons.match_re_dotall) test.set_diff_function(TestSCons.diff_re) test.write('configure/SConstruct', """\ -DefaultEnvironment(tools=[]) def CustomTest(context): def userAction(target,source,env): import shutil @@ -185,6 +186,7 @@ context.Result(ok) return ok +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools=[]) conf = Configure(env, custom_tests={'CustomTest':CustomTest}, diff -Nru scons-4.4.0+dfsg/test/option/option--Y.py scons-4.5.2+dfsg/test/option/option--Y.py --- scons-4.4.0+dfsg/test/option/option--Y.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/option--Y.py 2023-03-21 16:17:04.000000000 +0000 @@ -169,6 +169,7 @@ work2_foo = test.workpath('work2', 'foo' + _exe) SConstruct = """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Program(target = 'foo', source = 'foo.c') """ diff -Nru scons-4.4.0+dfsg/test/option/stack-size.py scons-4.5.2+dfsg/test/option/stack-size.py --- scons-4.4.0+dfsg/test/option/stack-size.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/stack-size.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,7 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License # +# Copyright The SCons Foundation # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including @@ -20,9 +21,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 
LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons @@ -47,8 +45,8 @@ test.write(['work1', 'SConstruct'], """ -DefaultEnvironment(tools=[]) B = Builder(action = r'%(_python_)s ../build.py $TARGETS $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools=[], BUILDERS = { 'B' : B }) f1 = env.B(target = 'f1.out', source = 'f1.in') f2 = env.B(target = 'f2.out', source = 'f2.in') @@ -62,6 +60,7 @@ test.write(['work2', 'SConstruct'], """ SetOption('stack_size', 128) B = Builder(action = r'%(_python_)s ../build.py $TARGETS $SOURCES') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILDERS = { 'B' : B }) f1 = env.B(target = 'f1.out', source = 'f1.in') f2 = env.B(target = 'f2.out', source = 'f2.in') diff -Nru scons-4.4.0+dfsg/test/option/taskmastertrace.py scons-4.5.2+dfsg/test/option/taskmastertrace.py --- scons-4.4.0+dfsg/test/option/taskmastertrace.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/option/taskmastertrace.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,111 +22,25 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Simple tests of the --taskmastertrace= option. """ +import os +import re import TestSCons test = TestSCons.TestSCons() -test.write('SConstruct', """ -DefaultEnvironment(tools=[]) -env = Environment(tools=[]) - -# We name the files 'Tfile' so that they will sort after the SConstruct -# file regardless of whether the test is being run on a case-sensitive -# or case-insensitive system. 
- -env.Command('Tfile.out', 'Tfile.mid', Copy('$TARGET', '$SOURCE')) -env.Command('Tfile.mid', 'Tfile.in', Copy('$TARGET', '$SOURCE')) -""") +test.file_fixture('fixture/SConstruct__taskmastertrace', 'SConstruct') +test.file_fixture('fixture/taskmaster_expected_stdout_1.txt', 'taskmaster_expected_stdout_1.txt') +test.file_fixture('fixture/taskmaster_expected_file_1.txt', 'taskmaster_expected_file_1.txt') +test.file_fixture('fixture/taskmaster_expected_new_parallel.txt', 'taskmaster_expected_new_parallel.txt') test.write('Tfile.in', "Tfile.in\n") -expect_stdout = test.wrap_stdout("""\ - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: adjusted ref count: , child 'SConstruct' -Taskmaster: adjusted ref count: , child 'Tfile.in' -Taskmaster: adjusted ref count: , child 'Tfile.mid' -Taskmaster: adjusted ref count: , child 'Tfile.out' -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Copy("Tfile.mid", "Tfile.in") -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Copy("Tfile.out", "Tfile.mid") -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node - -Taskmaster: Looking for a node to evaluate -Taskmaster: No candidate anymore. 
- -""") +expect_stdout = test.wrap_stdout(test.read('taskmaster_expected_stdout_1.txt', mode='r')) test.run(arguments='--taskmastertrace=- .', stdout=expect_stdout) @@ -136,86 +52,15 @@ """) test.run(arguments='--taskmastertrace=trace.out .', stdout=expect_stdout) +test.must_match_file('trace.out', 'taskmaster_expected_file_1.txt', mode='r') -expect_trace = """\ - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: adjusted ref count: , child 'SConstruct' -Taskmaster: adjusted ref count: , child 'Tfile.in' -Taskmaster: adjusted ref count: , child 'Tfile.mid' -Taskmaster: adjusted ref count: , child 'Tfile.out' -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node -Task.postprocess(): removing -Task.postprocess(): adjusted parent ref count - -Taskmaster: Looking for a node to evaluate -Taskmaster: Considering node and its children: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: -Taskmaster: Evaluating - -Task.make_ready_current(): node -Task.prepare(): node -Task.execute(): node -Task.executed_with_callbacks(): node -Task.postprocess(): node - -Taskmaster: Looking for a node to evaluate -Taskmaster: No candidate anymore. 
- -""" +# Test NewParallel Job implementation +test.run(arguments='-j 2 --experimental=tm_v2 --taskmastertrace=new_parallel_trace.out .') -test.must_match('trace.out', expect_trace, mode='r') +new_trace = test.read('new_parallel_trace.out', mode='r') +thread_id = re.compile(r'\[Thread:\d+\]') +new_trace=thread_id.sub('[Thread:XXXXX]', new_trace) +test.must_match('taskmaster_expected_new_parallel.txt', new_trace, mode='r') test.pass_test() diff -Nru scons-4.4.0+dfsg/test/Parallel/multiple-parents.py scons-4.5.2+dfsg/test/Parallel/multiple-parents.py --- scons-4.4.0+dfsg/test/Parallel/multiple-parents.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Parallel/multiple-parents.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,14 +22,11 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# """ Verify that a failed build action with -j works as expected. """ -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - import sys import TestSCons @@ -58,67 +57,77 @@ test.write('SConstruct', """ vars = Variables() -vars.Add( BoolVariable('interrupt', 'Interrupt the build.', 0 ) ) +vars.Add(BoolVariable('interrupt', 'Interrupt the build.', False)) +DefaultEnvironment(tools=[]) # test speedup varEnv = Environment(variables=vars) -def fail_action(target = None, source = None, env = None): +def fail_action(target=None, source=None, env=None): return 2 -def simulate_keyboard_interrupt(target = None, source = None, env = None): +def simulate_keyboard_interrupt(target=None, source=None, env=None): # Directly invoked the SIGINT handler to simulate a # KeyboardInterrupt. This hack is necessary because there is no # easy way to get access to the current Job/Taskmaster object. 
import signal + handler = signal.getsignal(signal.SIGINT) handler(signal.SIGINT, None) return 0 -interrupt = Command(target='interrupt', source='', action=simulate_keyboard_interrupt) +interrupt = Command(target='interrupt', source='', action=simulate_keyboard_interrupt) touch0 = Touch('${TARGETS[0]}') touch1 = Touch('${TARGETS[1]}') touch2 = Touch('${TARGETS[2]}') -failed0 = Command(target='failed00', source='', action=fail_action) -ok0 = Command(target=['ok00a', 'ok00b', 'ok00c'], - source='', - action=[touch0, touch1, touch2]) -prereq0 = Command(target='prereq00', source='', action=touch0) -ignore0 = Command(target='ignore00', source='', action=touch0) -igreq0 = Command(target='igreq00', source='', action=touch0) +failed0 = Command(target='failed00', source='', action=fail_action) +ok0 = Command( + target=['ok00a', 'ok00b', 'ok00c'], + source='', + action=[touch0, touch1, touch2], +) +prereq0 = Command(target='prereq00', source='', action=touch0) +ignore0 = Command(target='ignore00', source='', action=touch0) +igreq0 = Command(target='igreq00', source='', action=touch0) missing0 = Command(target='missing00', source='MissingSrc', action=touch0) -withSE0 = Command(target=['withSE00a', 'withSE00b', 'withSE00c'], - source='', - action=[touch0, touch1, touch2, Touch('side_effect')]) -SideEffect('side_effect', withSE0) +withSE0 = Command( + target=['withSE00a', 'withSE00b', 'withSE00c'], + source='', + action=[touch0, touch1, touch2, Touch('side_effect')], +) +SideEffect('side_effect', withSE0) -prev_level = failed0 + ok0 + ignore0 + missing0 + withSE0 +prev_level = failed0 + ok0 + ignore0 + missing0 + withSE0 prev_prereq = prereq0 prev_ignore = ignore0 -prev_igreq = igreq0 +prev_igreq = igreq0 if varEnv['interrupt']: prev_level = prev_level + interrupt -for i in range(1,20): - - failed = Command(target='failed%02d' % i, source='', action=fail_action) - ok = Command(target=['ok%02da' % i, 'ok%02db' % i, 'ok%02dc' % i], - source='', - action=[touch0, touch1, touch2]) - prereq = Command(target='prereq%02d' % i, source='', action=touch0) - ignore = Command(target='ignore%02d' % i, source='', action=touch0) - igreq = Command(target='igreq%02d' % i, source='', action=touch0) - missing = Command(target='missing%02d' %i, source='MissingSrc', action=touch0) - withSE = Command(target=['withSE%02da' % i, 'withSE%02db' % i, 'withSE%02dc' % i], - source='', - action=[touch0, touch1, touch2, Touch('side_effect')]) - SideEffect('side_effect', withSE) +for i in range(1, 20): + + failed = Command(target='failed%02d' % i, source='', action=fail_action) + ok = Command( + target=['ok%02da' % i, 'ok%02db' % i, 'ok%02dc' % i], + source='', + action=[touch0, touch1, touch2], + ) + prereq = Command(target='prereq%02d' % i, source='', action=touch0) + ignore = Command(target='ignore%02d' % i, source='', action=touch0) + igreq = Command(target='igreq%02d' % i, source='', action=touch0) + missing = Command(target='missing%02d' % i, source='MissingSrc', action=touch0) + withSE = Command( + target=['withSE%02da' % i, 'withSE%02db' % i, 'withSE%02dc' % i], + source='', + action=[touch0, touch1, touch2, Touch('side_effect')], + ) + SideEffect('side_effect', withSE) next_level = failed + ok + ignore + igreq + missing + withSE - for j in range(1,10): - a = Alias('a%02d%02d' % (i,j), prev_level) + for j in range(1, 10): + a = Alias('a%02d%02d' % (i, j), prev_level) Requires(a, prev_prereq) Ignore(a, prev_ignore) @@ -128,18 +137,18 @@ next_level = next_level + a - prev_level = next_level + prev_level = next_level prev_prereq 
= prereq prev_ignore = ignore - prev_igreq = igreq + prev_igreq = igreq all = Alias('all', prev_level) Requires(all, prev_prereq) -Ignore(all, prev_ignore) +Ignore(all, prev_ignore) Requires(all, prev_igreq) -Ignore(all, prev_igreq) +Ignore(all, prev_igreq) Default(all) """) diff -Nru scons-4.4.0+dfsg/test/ParseConfig.py scons-4.5.2+dfsg/test/ParseConfig.py --- scons-4.4.0+dfsg/test/ParseConfig.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/ParseConfig.py 2023-03-21 16:17:04.000000000 +0000 @@ -33,6 +33,7 @@ test_config1 = test.workpath('test-config1') test_config2 = test.workpath('test-config2') test_config3 = test.workpath('test-config3') +test_config4 = test.workpath('test-config4') # 'abc' is supposed to be a static lib; it is included in LIBS as a # File node. @@ -51,6 +52,10 @@ print("-L foo -L lib_dir -isysroot /tmp -arch ppc -arch i386") """) +test.write(test_config4, """\ +print("-D_REENTRANT -lpulse -pthread") +""") + test.write('SConstruct1', """ env = Environment(CPPPATH = [], LIBPATH = [], LIBS = [], CCFLAGS = '-pipe -Wall') @@ -85,6 +90,23 @@ print(env['CCFLAGS']) """ % locals()) +# issue #4321: if CPPDEFINES has been promoted to deque, adding would fail +test.write('SConstruct4', f"""\ +env = Environment( + CPPDEFINES="_REENTRANT", + LIBS=[], + CCFLAGS=[], + LINKFLAGS=[], + PYTHON=r'{_python_}', +) +env.Append(CPPDEFINES="TOOLS_ENABLED") +env.ParseConfig(r"$PYTHON {test_config4} --libs --cflags") +print([str(x) for x in env['CPPDEFINES']]) +print([str(x) for x in env['LIBS']]) +print(env['CCFLAGS']) +print(env['LINKFLAGS']) +""") + good_stdout = """\ ['/usr/include/fum', 'bar'] ['/usr/fax', 'foo', 'lib_dir'] @@ -99,12 +121,21 @@ ['-pipe', '-Wall', ('-isysroot', '/tmp'), ('-arch', 'ppc'), ('-arch', 'i386')] """ +stdout4 = """\ +['TOOLS_ENABLED', '_REENTRANT'] +['pulse'] +['-pthread'] +['-pthread'] +""" + test.run(arguments = "-q -Q -f SConstruct1 .", stdout = good_stdout) test.run(arguments = "-q -Q -f SConstruct2 .", stdout = good_stdout) test.run(arguments = "-q -Q -f SConstruct3 .", stdout = stdout3) +test.run(arguments = "-q -Q -f SConstruct4 .", stdout = stdout4) + test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/print_statement.py scons-4.5.2+dfsg/test/print_statement.py --- scons-4.4.0+dfsg/test/print_statement.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/print_statement.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,56 +0,0 @@ -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -import sys -import TestSCons - -test = TestSCons.TestSCons() - - -test.write('SConstruct', """\ -print('python 3 style statement') -Exit(0) -""") - -test.run() - -test.write('SConstruct', """\ -print 'python 2 style statement' -Exit(0) -""") - -if sys.version_info >= (3,0): - test.skip_test('Python 2 print statement test, skipping on Python 3.\n') -else: - test.run() - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/Program.py scons-4.5.2+dfsg/test/Program.py --- scons-4.4.0+dfsg/test/Program.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Program.py 2023-03-21 16:17:04.000000000 +0000 @@ -40,12 +40,13 @@ foo_args = 'foo1%s foo2%s foo3%s foo4%s foo5%s' % (_exe, _exe, _exe, _exe, _exe) test.write('SConstruct', """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Program(target='foo1', source='f1.c') env.Program(target='foo2', source=Split('f2a.c f2b.c f2c.c')) f3a = File('f3a.c') f3b = File('f3b.c') -Program(target='foo3', source=[f3a, [f3b, 'f3c.c']]) +env.Program(target='foo3', source=[f3a, [f3b, 'f3c.c']]) env.Program('foo4', 'f4.c') env.Program('foo5.c') """) diff -Nru scons-4.4.0+dfsg/test/QT/copied-env.py scons-4.5.2+dfsg/test/QT/copied-env.py --- scons-4.4.0+dfsg/test/QT/copied-env.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/copied-env.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,83 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test Qt with a copied construction environment. 
-""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation() - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """\ -Import("env") -env.Append(CPPDEFINES = ['FOOBAZ']) - -copy = env.Clone() -copy.Append(CPPDEFINES = ['MYLIB_IMPL']) - -copy.SharedLibrary( - target = 'MyLib', - source = ['MyFile.cpp','MyForm.ui'] -) -""") - -test.write('MyFile.h', r""" -void aaa(void); -""") - -test.write('MyFile.cpp', r""" -#include "MyFile.h" -void useit() { - aaa(); -} -""") - -test.write('MyForm.ui', r""" -void aaa(void) -""") - -test.run(arguments="--warn=no-tool-qt-deprecated") - -moc_MyForm = [x for x in test.stdout().split('\n') if x.find('moc_MyForm') != -1] - -MYLIB_IMPL = [x for x in moc_MyForm if x.find('MYLIB_IMPL') != -1] - -if not MYLIB_IMPL: - print("Did not find MYLIB_IMPL on moc_MyForm compilation line:") - print(test.stdout()) - test.fail_test() - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/CPPPATH-appended.py scons-4.5.2+dfsg/test/QT/CPPPATH-appended.py --- scons-4.4.0+dfsg/test/QT/CPPPATH-appended.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/CPPPATH-appended.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,80 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test that an appended relative CPPPATH works with generated files. - -This is basically the same as CPPPATH.py, but the include path -is env.Append-ed and everything goes into sub directory "sub". 
-""" - -import os.path - -import TestSCons - -test = TestSCons.TestSCons() - -test.subdir('sub', ['sub', 'local_include']) - -test.Qt_dummy_installation() - -aaa_exe = os.path.join('sub', 'aaa' + TestSCons._exe) - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', r""" -SConscript('sub/SConscript') -""") - -test.write(['sub', 'SConscript'], r""" -Import("env") -env.Append(CPPPATH=['./local_include']) -env.Program(target = 'aaa', source = 'aaa.cpp') -""") - -test.write(['sub', 'aaa.cpp'], r""" -#include "aaa.h" -int main(void) { aaa(); return 0; } -""") - -test.write(['sub', 'aaa.h'], r""" -#include "my_qobject.h" -#include "local_include.h" -void aaa(void) Q_OBJECT; -""") - -test.write(['sub', 'local_include', 'local_include.h'], r""" -/* empty; just needs to be found */ -""") - -test.run(arguments='--warn=no-tool-qt-deprecated ' + aaa_exe) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/CPPPATH.py scons-4.5.2+dfsg/test/QT/CPPPATH.py --- scons-4.4.0+dfsg/test/QT/CPPPATH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/CPPPATH.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test that an overwritten CPPPATH works with generated files. 
-""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.subdir('local_include') - -test.Qt_dummy_installation() - -aaa_exe = 'aaa' + TestSCons._exe - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """\ -Import("env") -env.Program(target = 'aaa', source = 'aaa.cpp', CPPPATH=['$CPPPATH', './local_include']) -""") - -test.write('aaa.cpp', r""" -#include "aaa.h" -int main(void) { aaa(); return 0; } -""") - -test.write('aaa.h', r""" -#include "my_qobject.h" -#include "local_include.h" -void aaa(void) Q_OBJECT; -""") - -test.write(['local_include', 'local_include.h'], r""" -/* empty; just needs to be found */ -""") - -test.run(arguments='--warn=no-tool-qt-deprecated ' + aaa_exe) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/empty-env.py scons-4.5.2+dfsg/test/QT/empty-env.py --- scons-4.4.0+dfsg/test/QT/empty-env.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/empty-env.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,79 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test Qt creation from a copied empty environment. -""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation('qt') - -test.write('SConstruct', """\ -orig = Environment() -env = orig.Clone(QTDIR = r'%s', - QT_LIB = r'%s', - QT_MOC = r'%s', - QT_UIC = r'%s', - tools=['qt']) -env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[]) -""" % (test.QT, test.QT_LIB, test.QT_MOC, test.QT_UIC)) - -test.write('main.cpp', r""" -#include "foo6.h" -int main(void) { foo6(); return 0; } -""") - -test.write(['qt', 'include', 'foo6.h'], """\ -#include -void -foo6(void) -{ -#ifdef FOO - printf("qt/include/foo6.h\\n"); -#endif -} -""") - -# we can receive warnings about a non detected qt (empty QTDIR) -# these are not critical, but may be annoying. 
-test.run(stderr=None, arguments='--warn=no-tool-qt-deprecated') - -test.run( - program=test.workpath('main' + TestSCons._exe), - arguments='--warn=no-tool-qt-deprecated', - stderr=None, - stdout='qt/include/foo6.h\n', -) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/generated-ui.py scons-4.5.2+dfsg/test/QT/generated-ui.py --- scons-4.4.0+dfsg/test/QT/generated-ui.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/generated-ui.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,136 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -""" -Test that the UI scanning logic correctly picks up scansG -""" - -import os - -import TestSCons - -test = TestSCons.TestSCons() - -if not os.environ.get('QTDIR', None): - x ="External environment variable $QTDIR not set; skipping test(s).\n" - test.skip_test(x) - -test.subdir(['layer'], - ['layer', 'aclock'], - ['layer', 'aclock', 'qt_bug']) - -test.write(['SConstruct'], """\ -import os -aa=os.getcwd() - -env=Environment(tools=['default','expheaders','qt'],toolpath=[aa]) -if 'HOME' in os.environ: - env['ENV']['HOME'] = os.environ['HOME'] -env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include') -if not os.access(env["EXP_HEADER_ABS"],os.F_OK): - os.mkdir (env["EXP_HEADER_ABS"]) -Export('env') -env.SConscript('layer/aclock/qt_bug/SConscript') -""") - -test.write(['expheaders.py'], """\ -import SCons.Defaults -def ExpHeaderScanner(node, env, path): - return [] -def generate(env): - HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0o755)]) - HeaderBuilder= SCons.Builder.Builder(action=HeaderAction) - env['BUILDERS']['ExportHeaders'] = HeaderBuilder -def exists(env): - return 0 -""") - -test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\ -import os - -Import ("env") -#src=os.path.join(env.Dir('.').srcnode().abspath, 'testfile.h') -env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h') -env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h') -env.Append(CPPPATH=env["EXP_HEADER_ABS"]) -env.StaticLibrary('all',['main.ui','migraform.ui']) -""") - -test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\ - -Main - - - Main - - - - 0 - 0 - 600 - 385 - - - - - migraform.h - - -""") - -test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\ - -MigrateForm - - - MigrateForm - - - - 0 - 0 - %s - 385 - - - - -""") - -test.run( - arguments='--warn=no-tool-qt-deprecated', - stderr=TestSCons.noisy_ar, - match=TestSCons.match_re_dotall, -) - -test.up_to_date(options="--warn=no-tool-qt-deprecated", arguments=".") - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/installed.py scons-4.5.2+dfsg/test/QT/installed.py --- scons-4.4.0+dfsg/test/QT/installed.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/installed.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,220 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Look if qt is installed, and try out all builders. -""" - -import os -import sys - -import TestSCons - -test = TestSCons.TestSCons() - -if not os.environ.get('QTDIR', None): - x ="External environment variable $QTDIR not set; skipping test(s).\n" - test.skip_test(x) - -test.Qt_dummy_installation() - -QTDIR=os.environ['QTDIR'] - - -test.write('SConstruct', """\ -import os -dummy_env = Environment() -ENV = dummy_env['ENV'] -try: - PATH=ARGUMENTS['PATH'] - if 'PATH' in ENV: - ENV_PATH = PATH + os.pathsep + ENV['PATH'] - else: - Exit(0) # this is certainly a weird system :-) -except KeyError: - ENV_PATH=ENV.get('PATH', '') - -env = Environment(tools=['default','qt'], - ENV={'PATH':ENV_PATH, - 'PATHEXT':os.environ.get('PATHEXT'), - 'HOME':os.getcwd(), - 'SystemRoot':ENV.get('SystemRoot')}, - # moc / uic want to write stuff in ~/.qt - CXXFILESUFFIX=".cpp") - -conf = env.Configure() -if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0): - conf.env['QT_LIB'] = 'qt-mt' - if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0): - Exit(0) -env = conf.Finish() -VariantDir('bld', '.') -env.Program('bld/test_realqt', ['bld/mocFromCpp.cpp', - 'bld/mocFromH.cpp', - 'bld/anUiFile.ui', - 'bld/main.cpp']) -""") - -test.write('mocFromCpp.h', """\ -void mocFromCpp(); -""") - -test.write('mocFromCpp.cpp', """\ -#include -#include "mocFromCpp.h" -class MyClass1 : public QObject { - Q_OBJECT - public: - MyClass1() : QObject() {}; - public slots: - void myslot() {}; -}; -void mocFromCpp() { - MyClass1 myclass; -} -#include "mocFromCpp.moc" -""") - -test.write('mocFromH.h', """\ -#include -class MyClass2 : public QObject { - Q_OBJECT; - public: - MyClass2(); - public slots: - void myslot(); -}; -void mocFromH(); -""") - -test.write('mocFromH.cpp', """\ -#include "mocFromH.h" - -MyClass2::MyClass2() : QObject() {} -void MyClass2::myslot() {} -void mocFromH() { - MyClass2 myclass; -} -""") - -test.write('anUiFile.ui', """\ - -MyWidget - - QWidget - - MyWidget - - - MyWidget - - - - anUiFile.ui.h - - - testSlot() - - - -""") - -test.write('anUiFile.ui.h', r""" -#include -#if QT_VERSION >= 0x030100 -void MyWidget::testSlot() -{ - printf("Hello World\n"); -} -#endif -""") - -test.write('main.cpp', r""" -#include -#include "mocFromCpp.h" -#include "mocFromH.h" -#include "anUiFile.h" -#include - -int main(int argc, char **argv) { - QApplication app(argc, argv); - mocFromCpp(); - mocFromH(); - MyWidget mywidget; -#if QT_VERSION >= 0x030100 - mywidget.testSlot(); -#else - printf("Hello World\n"); -#endif - return 0; -} -""") - -test.run(arguments="--warn=no-tool-qt-deprecated bld/test_realqt" + TestSCons._exe) - -test.run( - program=test.workpath("bld", "test_realqt"), - arguments="--warn=no-tool-qt-deprecated", - stdout=None, - status=None, - stderr=None, -) - -if test.stdout() != "Hello World\n" or test.stderr() != '' or test.status: - sys.stdout.write(test.stdout()) - sys.stderr.write(test.stderr()) - # The test might be run on a system that doesn't have an X server - # running, or may be run by an ID that can't connect to the server. - # If so, then print whatever it showed us (which is in and of itself - # an indication that it built correctly) but don't fail the test. 
- expect = 'cannot connect to X server' - test.fail_test(test.stdout()) - test.fail_test(expect not in test.stderr()) - if test.status != 1 and (test.status >> 8) != 1: - sys.stdout.write('test_realqt returned status %s\n' % test.status) - test.fail_test() - -QTDIR = os.environ['QTDIR'] -PATH = os.environ['PATH'] -os.environ['QTDIR'] = '' -os.environ['PATH'] = '.' - -test.run( - stderr=None, - arguments="--warn=no-tool-qt-deprecated -c bld/test_realqt" + TestSCons._exe, -) - -expect1 = "scons: warning: Could not detect qt, using empty QTDIR" -expect2 = "scons: warning: Could not detect qt, using moc executable as a hint" - -test.fail_test(expect1 not in test.stderr() and expect2 not in test.stderr()) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/manual.py scons-4.5.2+dfsg/test/QT/manual.py --- scons-4.4.0+dfsg/test/QT/manual.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/manual.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,148 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test the manual QT builder calls. -""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.subdir('include', 'ui') - -test.Qt_dummy_installation() - -aaa_exe = 'aaa' + TestSCons._exe - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', r""" -Import("env") -sources = ['aaa.cpp', 'bbb.cpp', 'ddd.cpp', 'eee.cpp', 'main.cpp'] - -# normal invocation -sources.append(env.Moc('include/aaa.h')) -moc = env.Moc('bbb.cpp') -env.Ignore( moc, moc ) -sources.extend(env.Uic('ui/ccc.ui')[1:]) - -# manual target specification -sources.append(env.Moc('moc-ddd.cpp', 'include/ddd.h', - QT_MOCHPREFIX='')) # Watch out ! 
-moc = env.Moc('moc_eee.cpp', 'eee.cpp') -env.Ignore( moc, moc ) -sources.extend(env.Uic(['include/uic_fff.hpp', 'fff.cpp', 'fff.moc.cpp'], - 'ui/fff.ui')[1:]) - -print(list(map(str,sources))) -env.Program(target='aaa', - source=sources, - CPPPATH=['$CPPPATH', './include'], - QT_AUTOSCAN=0) -""") - -test.write('aaa.cpp', r""" -#include "aaa.h" -""") - -test.write(['include', 'aaa.h'], r""" -#include "my_qobject.h" -void aaa(void) Q_OBJECT; -""") - -test.write('bbb.h', r""" -void bbb(void); -""") - -test.write('bbb.cpp', r""" -#include "my_qobject.h" -void bbb(void) Q_OBJECT -#include "bbb.moc" -""") - -test.write(['ui', 'ccc.ui'], r""" -void ccc(void) -""") - -test.write('ddd.cpp', r""" -#include "ddd.h" -""") - -test.write(['include', 'ddd.h'], r""" -#include "my_qobject.h" -void ddd(void) Q_OBJECT; -""") - -test.write('eee.h', r""" -void eee(void); -""") - -test.write('eee.cpp', r""" -#include "my_qobject.h" -void eee(void) Q_OBJECT -#include "moc_eee.cpp" -""") - -test.write(['ui', 'fff.ui'], r""" -void fff(void) -""") - -test.write('main.cpp', r""" -#include "aaa.h" -#include "bbb.h" -#include "ui/ccc.h" -#include "ddd.h" -#include "eee.h" -#include "uic_fff.hpp" - -int main(void) { - aaa(); bbb(); ccc(); ddd(); eee(); fff(); return 0; -} -""") - -test.run(arguments="--warn=no-tool-qt-deprecated " + aaa_exe) - -# normal invocation -test.must_exist(test.workpath('include', 'moc_aaa.cc')) -test.must_exist(test.workpath('bbb.moc')) -test.must_exist(test.workpath('ui', 'ccc.h')) -test.must_exist(test.workpath('ui', 'uic_ccc.cc')) -test.must_exist(test.workpath('ui', 'moc_ccc.cc')) - -# manual target spec. -test.must_exist(test.workpath('moc-ddd.cpp')) -test.must_exist(test.workpath('moc_eee.cpp')) -test.must_exist(test.workpath('include', 'uic_fff.hpp')) -test.must_exist(test.workpath('fff.cpp')) -test.must_exist(test.workpath('fff.moc.cpp')) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/moc-from-cpp.py scons-4.5.2+dfsg/test/QT/moc-from-cpp.py --- scons-4.4.0+dfsg/test/QT/moc-from-cpp.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/moc-from-cpp.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Create a moc file from a cpp file. 
-""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation() - -############################################################################## - -lib_aaa = TestSCons.lib_ + 'aaa' + TestSCons._lib -moc = 'aaa.moc' - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """ -Import("env dup") -if dup == 0: env.Append(CPPPATH=['.']) -env.StaticLibrary(target = '%s', source = ['aaa.cpp','useit.cpp']) -""" % lib_aaa) - -test.write('aaa.h', r""" -void aaa(void); -""") - -test.write('aaa.cpp', r""" -#include "my_qobject.h" -void aaa(void) Q_OBJECT -#include "%s" -""" % moc) - -test.write('useit.cpp', r""" -#include "aaa.h" -void useit() { - aaa(); -} -""") - -test.run( - arguments="--warn=no-tool-qt-deprecated " + lib_aaa, - stderr=TestSCons.noisy_ar, - match=TestSCons.match_re_dotall, -) - -test.up_to_date(options='-n --warn=no-tool-qt-deprecated', arguments=lib_aaa) - -test.write('aaa.cpp', r""" -#include "my_qobject.h" -/* a change */ -void aaa(void) Q_OBJECT -#include "%s" -""" % moc) - -test.not_up_to_date(options='-n --warn=no-tool-qt-deprecated', arguments=moc) - -test.run(options="--warn=no-tool-qt-deprecated -c", arguments=lib_aaa) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 " - + test.workpath('build', lib_aaa), - stderr=TestSCons.noisy_ar, - match=TestSCons.match_re_dotall, -) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 " - + test.workpath('build', lib_aaa) -) - -test.must_exist(test.workpath('build', moc)) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 dup=0 " - + test.workpath('build_dup0', lib_aaa), - stderr=TestSCons.noisy_ar, - match=TestSCons.match_re_dotall, -) - -test.must_exist(test.workpath('build_dup0', moc)) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/moc-from-header.py scons-4.5.2+dfsg/test/QT/moc-from-header.py --- scons-4.4.0+dfsg/test/QT/moc-from-header.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/moc-from-header.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,109 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Create a moc file from a header file. 
-""" - -import os - -import TestSCons - -test = TestSCons.TestSCons() - -test.write('SConstruct', """ -env = Environment() -""") - -test.Qt_dummy_installation() - -# We'll run some test programs later that need to find our dummy -# Qt library. -os.environ['LD_LIBRARY_PATH'] = test.QT_LIB_DIR - -############################################################################## - -aaa_exe = 'aaa' + TestSCons._exe -build_aaa_exe = test.workpath('build', aaa_exe) -moc = 'moc_aaa.cc' - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """\ -Import("env") -env.Program(target = 'aaa', source = 'aaa.cpp') -if env['PLATFORM'] == 'darwin': - env.Install('.', 'qt/lib/libmyqt.dylib') -""") - -test.write('aaa.cpp', r""" -#include "aaa.h" -int main(void) { aaa(); return 0; } -""") - -test.write('aaa.h', r""" -#include "my_qobject.h" -void aaa(void) Q_OBJECT; -""") - -test.run(arguments="--warn=no-tool-qt-deprecated") -test.up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=aaa_exe) - -test.write('aaa.h', r""" -/* a change */ -#include "my_qobject.h" -void aaa(void) Q_OBJECT; -""") - -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=moc) - -test.run( - arguments="--warn=no-tool-qt-deprecated", - program=test.workpath(aaa_exe), - stdout='aaa.h\n', -) - -test.run(arguments="--warn=no-tool-qt-deprecated variant_dir=1 " + build_aaa_exe) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 " + build_aaa_exe -) - -test.must_exist(test.workpath('build', moc)) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 dup=0 " - + test.workpath('build_dup0', aaa_exe) -) - -test.must_exist(['build_dup0', moc]) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/copied-env.py scons-4.5.2+dfsg/test/QT/qt3/copied-env.py --- scons-4.4.0+dfsg/test/QT/qt3/copied-env.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/copied-env.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test Qt with a copied construction environment. 
+""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation() + +test.Qt_create_SConstruct('SConstruct', qt_tool='qt3') + +test.write('SConscript', """\ +Import("env") +env.Append(CPPDEFINES = ['FOOBAZ']) + +copy = env.Clone() +copy.Append(CPPDEFINES = ['MYLIB_IMPL']) + +copy.SharedLibrary( + target = 'MyLib', + source = ['MyFile.cpp','MyForm.ui'] +) +""") + +test.write('MyFile.h', r""" +void aaa(void); +""") + +test.write('MyFile.cpp', r""" +#include "MyFile.h" +void useit() { + aaa(); +} +""") + +test.write('MyForm.ui', r""" +void aaa(void) +""") + +test.run() + +moc_MyForm = [x for x in test.stdout().split('\n') if x.find('moc_MyForm') != -1] + +MYLIB_IMPL = [x for x in moc_MyForm if x.find('MYLIB_IMPL') != -1] + +if not MYLIB_IMPL: + print("Did not find MYLIB_IMPL on moc_MyForm compilation line:") + print(test.stdout()) + test.fail_test() + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/CPPPATH-appended.py scons-4.5.2+dfsg/test/QT/qt3/CPPPATH-appended.py --- scons-4.4.0+dfsg/test/QT/qt3/CPPPATH-appended.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/CPPPATH-appended.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test that an appended relative CPPPATH works with generated files. + +This is basically the same as CPPPATH.py, but the include path +is env.Append-ed and everything goes into sub directory "sub". 
+""" + +import os.path + +import TestSCons + +test = TestSCons.TestSCons() + +test.subdir('sub', ['sub', 'local_include']) + +test.Qt_dummy_installation() + +aaa_exe = os.path.join('sub', 'aaa' + TestSCons._exe) + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', r""" +SConscript('sub/SConscript') +""") + +test.write(['sub', 'SConscript'], r""" +Import("env") +env.Append(CPPPATH=['./local_include']) +env.Program(target = 'aaa', source = 'aaa.cpp') +""") + +test.write(['sub', 'aaa.cpp'], r""" +#include "aaa.h" +int main(void) { aaa(); return 0; } +""") + +test.write(['sub', 'aaa.h'], r""" +#include "my_qobject.h" +#include "local_include.h" +void aaa(void) Q_OBJECT; +""") + +test.write(['sub', 'local_include', 'local_include.h'], r""" +/* empty; just needs to be found */ +""") + +test.run(arguments='--warn=no-tool-qt-deprecated ' + aaa_exe) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/CPPPATH.py scons-4.5.2+dfsg/test/QT/qt3/CPPPATH.py --- scons-4.4.0+dfsg/test/QT/qt3/CPPPATH.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/CPPPATH.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test that an overwritten CPPPATH works with generated files. 
+""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.subdir('local_include') + +test.Qt_dummy_installation() + +aaa_exe = 'aaa' + TestSCons._exe + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', """\ +Import("env") +env.Program(target = 'aaa', source = 'aaa.cpp', CPPPATH=['$CPPPATH', './local_include']) +""") + +test.write('aaa.cpp', r""" +#include "aaa.h" +int main(void) { aaa(); return 0; } +""") + +test.write('aaa.h', r""" +#include "my_qobject.h" +#include "local_include.h" +void aaa(void) Q_OBJECT; +""") + +test.write(['local_include', 'local_include.h'], r""" +/* empty; just needs to be found */ +""") + +test.run(arguments='--warn=no-tool-qt-deprecated ' + aaa_exe) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/empty-env.py scons-4.5.2+dfsg/test/QT/qt3/empty-env.py --- scons-4.4.0+dfsg/test/QT/qt3/empty-env.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/empty-env.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test Qt creation from a copied empty environment. +""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation('qt') + +test.write('SConstruct', """\ +orig = Environment() +env = orig.Clone(QT3DIR = r'%s', + QT3_LIB = r'%s', + QT3_MOC = r'%s', + QT3_UIC = r'%s', + tools=['qt3']) +env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[]) +""" % (test.QT, test.QT_LIB, test.QT_MOC, test.QT_UIC)) + +test.write('main.cpp', r""" +#include "foo6.h" +int main(void) { foo6(); return 0; } +""") + +test.write(['qt', 'include', 'foo6.h'], """\ +#include +void +foo6(void) +{ +#ifdef FOO + printf("qt/include/foo6.h\\n"); +#endif +} +""") + +# we can receive warnings about a non detected qt (empty QTDIR) +# these are not critical, but may be annoying. 
+test.run(stderr=None) + +test.run( + program=test.workpath('main' + TestSCons._exe), + stderr=None, + stdout='qt/include/foo6.h\n', +) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/generated-ui.py scons-4.5.2+dfsg/test/QT/qt3/generated-ui.py --- scons-4.4.0+dfsg/test/QT/qt3/generated-ui.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/generated-ui.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test that the UI scanning logic correctly picks up scansG +""" + +import os + +import TestSCons + +test = TestSCons.TestSCons() + +if not os.environ.get('QTDIR', None): + x ="External environment variable $QTDIR not set; skipping test(s).\n" + test.skip_test(x) + +test.subdir(['layer'], + ['layer', 'aclock'], + ['layer', 'aclock', 'qt_bug']) + +test.write(['SConstruct'], """\ +import os +aa=os.getcwd() + +env=Environment(tools=['default','expheaders','qt3'],toolpath=[aa]) +if 'HOME' in os.environ: + env['ENV']['HOME'] = os.environ['HOME'] +env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include') +if not os.access(env["EXP_HEADER_ABS"],os.F_OK): + os.mkdir (env["EXP_HEADER_ABS"]) +Export('env') +env.SConscript('layer/aclock/qt_bug/SConscript') +""") + +test.write(['expheaders.py'], """\ +import SCons.Defaults +def ExpHeaderScanner(node, env, path): + return [] +def generate(env): + HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0o755)]) + HeaderBuilder= SCons.Builder.Builder(action=HeaderAction) + env['BUILDERS']['ExportHeaders'] = HeaderBuilder +def exists(env): + return 0 +""") + +test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\ +import os + +Import ("env") +#src=os.path.join(env.Dir('.').srcnode().abspath, 'testfile.h') +env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h') +env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h') +env.Append(CPPPATH=env["EXP_HEADER_ABS"]) +env.StaticLibrary('all',['main.ui','migraform.ui']) +""") + +test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\ + +Main + + + Main + + + + 0 + 0 + 600 + 385 + + + + + migraform.h + + +""") + +test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\ + +MigrateForm + + + MigrateForm + + + + 0 + 0 + %s 
+ 385 + + + + +""") + +test.run( + stderr=TestSCons.noisy_ar, + match=TestSCons.match_re_dotall, +) + +test.up_to_date(arguments=".") + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/installed.py scons-4.5.2+dfsg/test/QT/qt3/installed.py --- scons-4.4.0+dfsg/test/QT/qt3/installed.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/installed.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,220 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Look if qt3 is installed, and try out all builders. +""" + +import os +import sys + +import TestSCons + +test = TestSCons.TestSCons() + +if not os.environ.get('QTDIR', None): + x ="External environment variable $QTDIR not set; skipping test(s).\n" + test.skip_test(x) + +test.Qt_dummy_installation() + +QTDIR=os.environ['QTDIR'] + + +test.write('SConstruct', """\ +import os +dummy_env = Environment() +ENV = dummy_env['ENV'] +try: + PATH=ARGUMENTS['PATH'] + if 'PATH' in ENV: + ENV_PATH = PATH + os.pathsep + ENV['PATH'] + else: + Exit(0) # this is certainly a weird system :-) +except KeyError: + ENV_PATH=ENV.get('PATH', '') + +env = Environment(tools=['default','qt3'], + ENV={'PATH':ENV_PATH, + 'PATHEXT':os.environ.get('PATHEXT'), + 'HOME':os.getcwd(), + 'SystemRoot':ENV.get('SystemRoot')}, + # moc / uic want to write stuff in ~/.qt + CXXFILESUFFIX=".cpp") + +conf = env.Configure() +if not conf.CheckLib(env.subst("$QT3_LIB"), autoadd=0): + conf.env['QT3_LIB'] = 'qt-mt' + if not conf.CheckLib(env.subst("$QT3_LIB"), autoadd=0): + Exit(0) +env = conf.Finish() +VariantDir('bld', '.') +env.Program('bld/test_realqt', ['bld/mocFromCpp.cpp', + 'bld/mocFromH.cpp', + 'bld/anUiFile.ui', + 'bld/main.cpp']) +""") + +test.write('mocFromCpp.h', """\ +void mocFromCpp(); +""") + +test.write('mocFromCpp.cpp', """\ +#include +#include "mocFromCpp.h" +class MyClass1 : public QObject { + Q_OBJECT + public: + MyClass1() : QObject() {}; + public slots: + void myslot() {}; +}; +void mocFromCpp() { + MyClass1 myclass; +} +#include "mocFromCpp.moc" +""") + +test.write('mocFromH.h', """\ +#include +class MyClass2 : public QObject { + Q_OBJECT; + public: + MyClass2(); + public slots: + void myslot(); +}; +void mocFromH(); +""") + +test.write('mocFromH.cpp', """\ +#include "mocFromH.h" + +MyClass2::MyClass2() : QObject() {} +void 
MyClass2::myslot() {} +void mocFromH() { + MyClass2 myclass; +} +""") + +test.write('anUiFile.ui', """\ + +MyWidget + + QWidget + + MyWidget + + + MyWidget + + + + anUiFile.ui.h + + + testSlot() + + + +""") + +test.write('anUiFile.ui.h', r""" +#include +#if QT_VERSION >= 0x030100 +void MyWidget::testSlot() +{ + printf("Hello World\n"); +} +#endif +""") + +test.write('main.cpp', r""" +#include +#include "mocFromCpp.h" +#include "mocFromH.h" +#include "anUiFile.h" +#include + +int main(int argc, char **argv) { + QApplication app(argc, argv); + mocFromCpp(); + mocFromH(); + MyWidget mywidget; +#if QT_VERSION >= 0x030100 + mywidget.testSlot(); +#else + printf("Hello World\n"); +#endif + return 0; +} +""") + +test.run(arguments="--warn=no-tool-qt-deprecated bld/test_realqt" + TestSCons._exe) + +test.run( + program=test.workpath("bld", "test_realqt"), + arguments="--warn=no-tool-qt-deprecated", + stdout=None, + status=None, + stderr=None, +) + +if test.stdout() != "Hello World\n" or test.stderr() != '' or test.status: + sys.stdout.write(test.stdout()) + sys.stderr.write(test.stderr()) + # The test might be run on a system that doesn't have an X server + # running, or may be run by an ID that can't connect to the server. + # If so, then print whatever it showed us (which is in and of itself + # an indication that it built correctly) but don't fail the test. + expect = 'cannot connect to X server' + test.fail_test(test.stdout()) + test.fail_test(expect not in test.stderr()) + if test.status != 1 and (test.status >> 8) != 1: + sys.stdout.write('test_realqt returned status %s\n' % test.status) + test.fail_test() + +QT3DIR = os.environ['QTDIR'] +PATH = os.environ['PATH'] +os.environ['QTDIR'] = '' +os.environ['PATH'] = '.' + +test.run( + stderr=None, + arguments="-c bld/test_realqt" + TestSCons._exe, +) + +expect1 = "scons: warning: Could not detect qt3, using empty QT3DIR" +expect2 = "scons: warning: Could not detect qt3, using moc executable as a hint" + +test.fail_test(expect1 not in test.stderr() and expect2 not in test.stderr()) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/manual.py scons-4.5.2+dfsg/test/QT/qt3/manual.py --- scons-4.4.0+dfsg/test/QT/qt3/manual.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/manual.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,148 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test the manual QT3 builder calls. +""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.subdir('include', 'ui') + +test.Qt_dummy_installation() + +aaa_exe = 'aaa' + TestSCons._exe + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', r""" +Import("env") +sources = ['aaa.cpp', 'bbb.cpp', 'ddd.cpp', 'eee.cpp', 'main.cpp'] + +# normal invocation +sources.append(env.Moc('include/aaa.h')) +moc = env.Moc('bbb.cpp') +env.Ignore( moc, moc ) +sources.extend(env.Uic('ui/ccc.ui')[1:]) + +# manual target specification +sources.append(env.Moc('moc-ddd.cpp', 'include/ddd.h', + QT3_MOCHPREFIX='')) # Watch out ! +moc = env.Moc('moc_eee.cpp', 'eee.cpp') +env.Ignore( moc, moc ) +sources.extend(env.Uic(['include/uic_fff.hpp', 'fff.cpp', 'fff.moc.cpp'], + 'ui/fff.ui')[1:]) + +print(list(map(str,sources))) +env.Program(target='aaa', + source=sources, + CPPPATH=['$CPPPATH', './include'], + QT3_AUTOSCAN=0) +""") + +test.write('aaa.cpp', r""" +#include "aaa.h" +""") + +test.write(['include', 'aaa.h'], r""" +#include "my_qobject.h" +void aaa(void) Q_OBJECT; +""") + +test.write('bbb.h', r""" +void bbb(void); +""") + +test.write('bbb.cpp', r""" +#include "my_qobject.h" +void bbb(void) Q_OBJECT +#include "bbb.moc" +""") + +test.write(['ui', 'ccc.ui'], r""" +void ccc(void) +""") + +test.write('ddd.cpp', r""" +#include "ddd.h" +""") + +test.write(['include', 'ddd.h'], r""" +#include "my_qobject.h" +void ddd(void) Q_OBJECT; +""") + +test.write('eee.h', r""" +void eee(void); +""") + +test.write('eee.cpp', r""" +#include "my_qobject.h" +void eee(void) Q_OBJECT +#include "moc_eee.cpp" +""") + +test.write(['ui', 'fff.ui'], r""" +void fff(void) +""") + +test.write('main.cpp', r""" +#include "aaa.h" +#include "bbb.h" +#include "ui/ccc.h" +#include "ddd.h" +#include "eee.h" +#include "uic_fff.hpp" + +int main(void) { + aaa(); bbb(); ccc(); ddd(); eee(); fff(); return 0; +} +""") + +test.run(arguments=aaa_exe) + +# normal invocation +test.must_exist(test.workpath('include', 'moc_aaa.cc')) +test.must_exist(test.workpath('bbb.moc')) +test.must_exist(test.workpath('ui', 'ccc.h')) +test.must_exist(test.workpath('ui', 'uic_ccc.cc')) +test.must_exist(test.workpath('ui', 'moc_ccc.cc')) + +# manual target spec. 
+test.must_exist(test.workpath('moc-ddd.cpp')) +test.must_exist(test.workpath('moc_eee.cpp')) +test.must_exist(test.workpath('include', 'uic_fff.hpp')) +test.must_exist(test.workpath('fff.cpp')) +test.must_exist(test.workpath('fff.moc.cpp')) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/moc-from-cpp.py scons-4.5.2+dfsg/test/QT/qt3/moc-from-cpp.py --- scons-4.4.0+dfsg/test/QT/qt3/moc-from-cpp.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/moc-from-cpp.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Create a moc file from a cpp file. 
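A minimal sketch of the build pattern this test drives, assuming a Qt 3 installation reachable through $QTDIR (editorial illustration, not a hunk of the patch; the file names mirror the fixture that follows):

# SConstruct -- illustrative only
import os
env = Environment(tools=['default', 'qt3'], QT3DIR=os.environ.get('QTDIR', ''))
# aaa.cpp declares Q_OBJECT and ends with '#include "aaa.moc"', so the qt3
# scanner schedules a moc pass on aaa.cpp and generates aaa.moc before the
# library's objects are compiled.
env.StaticLibrary(target='aaa', source=['aaa.cpp', 'useit.cpp'])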
+""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation() + +############################################################################## + +lib_aaa = TestSCons.lib_ + 'aaa' + TestSCons._lib +moc = 'aaa.moc' + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', """ +Import("env dup") +if dup == 0: env.Append(CPPPATH=['.']) +env.StaticLibrary(target = '%s', source = ['aaa.cpp','useit.cpp']) +""" % lib_aaa) + +test.write('aaa.h', r""" +void aaa(void); +""") + +test.write('aaa.cpp', r""" +#include "my_qobject.h" +void aaa(void) Q_OBJECT +#include "%s" +""" % moc) + +test.write('useit.cpp', r""" +#include "aaa.h" +void useit() { + aaa(); +} +""") + +test.run( + arguments="--warn=no-tool-qt-deprecated " + lib_aaa, + stderr=TestSCons.noisy_ar, + match=TestSCons.match_re_dotall, +) + +test.up_to_date(options='-n --warn=no-tool-qt-deprecated', arguments=lib_aaa) + +test.write('aaa.cpp', r""" +#include "my_qobject.h" +/* a change */ +void aaa(void) Q_OBJECT +#include "%s" +""" % moc) + +test.not_up_to_date(options='-n --warn=no-tool-qt-deprecated', arguments=moc) + +test.run(options="--warn=no-tool-qt-deprecated -c", arguments=lib_aaa) + +test.run( + arguments="--warn=no-tool-qt-deprecated variant_dir=1 " + + test.workpath('build', lib_aaa), + stderr=TestSCons.noisy_ar, + match=TestSCons.match_re_dotall, +) + +test.run( + arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 " + + test.workpath('build', lib_aaa) +) + +test.must_exist(test.workpath('build', moc)) + +test.run( + arguments="--warn=no-tool-qt-deprecated variant_dir=1 dup=0 " + + test.workpath('build_dup0', lib_aaa), + stderr=TestSCons.noisy_ar, + match=TestSCons.match_re_dotall, +) + +test.must_exist(test.workpath('build_dup0', moc)) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/moc-from-header.py scons-4.5.2+dfsg/test/QT/qt3/moc-from-header.py --- scons-4.4.0+dfsg/test/QT/qt3/moc-from-header.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/moc-from-header.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,109 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Create a moc file from a header file. 
+""" + +import os + +import TestSCons + +test = TestSCons.TestSCons() + +test.write('SConstruct', """ +env = Environment() +""") + +test.Qt_dummy_installation() + +# We'll run some test programs later that need to find our dummy +# Qt library. +os.environ['LD_LIBRARY_PATH'] = test.QT_LIB_DIR + +############################################################################## + +aaa_exe = 'aaa' + TestSCons._exe +build_aaa_exe = test.workpath('build', aaa_exe) +moc = 'moc_aaa.cc' + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', """\ +Import("env") +env.Program(target = 'aaa', source = 'aaa.cpp') +if env['PLATFORM'] == 'darwin': + env.Install('.', 'qt/lib/libmyqt.dylib') +""") + +test.write('aaa.cpp', r""" +#include "aaa.h" +int main(void) { aaa(); return 0; } +""") + +test.write('aaa.h', r""" +#include "my_qobject.h" +void aaa(void) Q_OBJECT; +""") + +test.run(arguments="--warn=no-tool-qt-deprecated") +test.up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=aaa_exe) + +test.write('aaa.h', r""" +/* a change */ +#include "my_qobject.h" +void aaa(void) Q_OBJECT; +""") + +test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=moc) + +test.run( + arguments="--warn=no-tool-qt-deprecated", + program=test.workpath(aaa_exe), + stdout='aaa.h\n', +) + +test.run(arguments="--warn=no-tool-qt-deprecated variant_dir=1 " + build_aaa_exe) + +test.run( + arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 " + build_aaa_exe +) + +test.must_exist(test.workpath('build', moc)) + +test.run( + arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 dup=0 " + + test.workpath('build_dup0', aaa_exe) +) + +test.must_exist(['build_dup0', moc]) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/QTFLAGS.py scons-4.5.2+dfsg/test/QT/qt3/QTFLAGS.py --- scons-4.4.0+dfsg/test/QT/qt3/QTFLAGS.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/QTFLAGS.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,219 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Testing the configuration mechanisms of the 'qt3' tool. 
+""" + +import TestSCons + +_python_ = TestSCons._python_ +_exe = TestSCons._exe + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation() +test.subdir('work1', 'work2') + +test.run( + chdir=test.workpath('qt', 'lib'), + arguments=".", + stderr=TestSCons.noisy_ar, + match=TestSCons.match_re_dotall, +) + +QT3 = test.workpath('qt') +QT3_LIB = 'myqt' +QT3_MOC = '%s %s' % (_python_, test.workpath('qt', 'bin', 'mymoc.py')) +QT3_UIC = '%s %s' % (_python_, test.workpath('qt', 'bin', 'myuic.py')) + +def createSConstruct(test, place, overrides): + test.write(place, """\ +env = Environment( + tools=['default','qt3'], + QT3DIR = r'%s', + QT3_LIB = r'%s', + QT3_MOC = r'%s', + QT3_UIC = r'%s', + %s # last because 'overrides' may add comma +) +if ARGUMENTS.get('variant_dir', 0): + if ARGUMENTS.get('chdir', 0): + SConscriptChdir(1) + else: + SConscriptChdir(0) + VariantDir('build', '.', duplicate=1) + sconscript = Dir('build').File('SConscript') +else: + sconscript = File('SConscript') +Export("env") +SConscript(sconscript) +""" % (QT3, QT3_LIB, QT3_MOC, QT3_UIC, overrides)) + + +createSConstruct(test, ['work1', 'SConstruct'], + """QT3_UICIMPLFLAGS='-x', + QT3_UICDECLFLAGS='-y', + QT3_MOCFROMHFLAGS='-z', + QT3_MOCFROMCXXFLAGS='-i -w', + QT3_UICDECLPREFIX='uic-', + QT3_UICDECLSUFFIX='.hpp', + QT3_UICIMPLPREFIX='', + QT3_UICIMPLSUFFIX='.cxx', + QT3_MOCHPREFIX='mmm', + QT3_MOCHSUFFIX='.cxx', + QT3_MOCCXXPREFIX='moc', + QT3_MOCCXXSUFFIX='.inl', + QT3_UISUFFIX='.myui',""") +test.write(['work1', 'SConscript'],""" +Import("env") +env.Program('mytest', ['mocFromH.cpp', + 'mocFromCpp.cpp', + 'an_ui_file.myui', + 'another_ui_file.myui', + 'main.cpp']) +""") + +test.write(['work1', 'mocFromH.hpp'], """ +#include "my_qobject.h" +void mocFromH() Q_OBJECT +""") + +test.write(['work1', 'mocFromH.cpp'], """ +#include "mocFromH.hpp" +""") + +test.write(['work1', 'mocFromCpp.cpp'], """ +#include "my_qobject.h" +void mocFromCpp() Q_OBJECT +#include "mocmocFromCpp.inl" +""") + +test.write(['work1', 'an_ui_file.myui'], """ +void an_ui_file() +""") + +test.write(['work1', 'another_ui_file.myui'], """ +void another_ui_file() +""") + +test.write(['work1', 'another_ui_file.desc.hpp'], """ +/* just a dependency checker */ +""") + +test.write(['work1', 'main.cpp'], """ +#include "mocFromH.hpp" +#include "uic-an_ui_file.hpp" +#include "uic-another_ui_file.hpp" +void mocFromCpp(); + +int main(void) { + mocFromH(); + mocFromCpp(); + an_ui_file(); + another_ui_file(); +} +""") + +test.run(chdir='work1', arguments="mytest" + _exe) + +test.must_exist( + ['work1', 'mmmmocFromH.cxx'], + ['work1', 'mocmocFromCpp.inl'], + ['work1', 'an_ui_file.cxx'], + ['work1', 'uic-an_ui_file.hpp'], + ['work1', 'mmman_ui_file.cxx'], + ['work1', 'another_ui_file.cxx'], + ['work1', 'uic-another_ui_file.hpp'], + ['work1', 'mmmanother_ui_file.cxx'], +) + +def _flagTest(test,fileToContentsStart): + for f,c in fileToContentsStart.items(): + if test.read(test.workpath('work1', f), mode='r').find(c) != 0: + return 1 + return 0 + +test.fail_test( + _flagTest( + test, + { + 'mmmmocFromH.cxx': '/* mymoc.py -z */', + 'mocmocFromCpp.inl': '/* mymoc.py -w */', + 'an_ui_file.cxx': '/* myuic.py -x */', + 'uic-an_ui_file.hpp': '/* myuic.py -y */', + 'mmman_ui_file.cxx': '/* mymoc.py -z */', + }, + ) +) + +test.write(['work2', 'SConstruct'], """ +import os.path + +env1 = Environment( + tools=['qt3'], + QT3DIR=r'%(QT3DIR)s', + QT3_BINPATH='$QT3DIR/bin64', + QT3_LIBPATH='$QT3DIR/lib64', + QT3_CPPPATH='$QT3DIR/h64', +) + +cpppath = env1.subst('$CPPPATH') +if 
os.path.normpath(cpppath) != os.path.join(r'%(QT3DIR)s', 'h64'): + print(cpppath) + Exit(1) +libpath = env1.subst('$LIBPATH') +if os.path.normpath(libpath) != os.path.join(r'%(QT3DIR)s', 'lib64'): + print(libpath) + Exit(2) +qt_moc = env1.subst('$QT3_MOC') +if os.path.normpath(qt_moc) != os.path.join(r'%(QT3DIR)s', 'bin64', 'moc'): + print(qt_moc) + Exit(3) + +env2 = Environment( + tools=['default', 'qt3'], QT3DIR=None, QT3_LIB=None, QT3_CPPPATH=None, QT3_LIBPATH=None +) + +env2.Program('main.cpp') +""" % {'QT3DIR':QT3}) + +test.write(['work2', 'main.cpp'], """ +int main(void) { return 0; } +""") + +# Ignore stderr, because if Qt is not installed, +# there may be a warning about an empty QTDIR on stderr. +test.run(chdir='work2', stderr=None) + +test.must_exist(['work2', 'main' + _exe]) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/qt_warnings.py scons-4.5.2+dfsg/test/QT/qt3/qt_warnings.py --- scons-4.4.0+dfsg/test/QT/qt3/qt_warnings.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/qt_warnings.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test the Qt tool warnings. +""" + +import os +import re + +import TestSCons + +test = TestSCons.TestSCons() + +SConstruct_path = test.workpath('SConstruct') + +test.Qt_dummy_installation() + +test.Qt_create_SConstruct(SConstruct_path) + +test.write('aaa.cpp', r""" +#include "my_qobject.h" +void aaa(void) Q_OBJECT +""") + +test.write('SConscript', r""" +Import("env") +import os +env.StaticLibrary('aaa.cpp') +""") + +test.run(stderr=None) + +match12 = r""" +scons: warning: Generated moc file 'aaa.moc' is not included by 'aaa.cpp' +""" + TestSCons.file_expr + +if not re.search(match12, test.stderr()): + print("Did not find expected regular expression in stderr:") + print(test.stderr()) + test.fail_test() + +os.environ['QTDIR'] = test.QT + +test.run(arguments='-n noqtdir=1') + +# We'd like to eliminate $QTDIR from the environment as follows: +# del os.environ['QTDIR'] +# But unfortunately, in at least some versions of Python, the Environment +# class doesn't implement a __delitem__() method to make the library +# call to actually remove the deleted variable from the *external* +# environment, so it only gets removed from the Python dictionary. 
+# Consequently, we need to just wipe out its value as follows> +os.environ['QTDIR'] = '' +test.run(stderr=None, arguments='-n noqtdir=1') + +moc = test.where_is('moc') +if moc: + import os.path + qtdir = os.path.dirname(os.path.dirname(moc)) + qtdir = qtdir.replace('\\', '\\\\' ) + + expect = r""" +scons: warning: Could not detect qt3, using moc executable as a hint \(QT3DIR=%s\) +File "%s", line \d+, in (\?|) +""" % (qtdir, re.escape(SConstruct_path)) +else: + + expect = r""" +scons: warning: Could not detect qt3, using empty QT3DIR +File "%s", line \d+, in (\?|) +""" % re.escape(SConstruct_path) + +test.fail_test(not test.match_re(test.stderr(), expect)) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/reentrant.py scons-4.5.2+dfsg/test/QT/qt3/reentrant.py --- scons-4.4.0+dfsg/test/QT/qt3/reentrant.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/reentrant.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test creation from a copied environment that already has QT variables. +This makes sure the tool initialization is re-entrant. 
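The re-entrancy requirement, in isolation (editorial sketch, not a patch hunk; assumes the qt3 tool can locate Qt via $QTDIR or a dummy installation like the one set up below):

# SConstruct -- illustrative only
import os
base = Environment(tools=['default', 'qt3'], QT3DIR=os.environ.get('QTDIR', ''))
# Cloning an environment that already ran the qt3 tool and asking for the
# tool again must be harmless: the tool's generate() has to cope with
# variables that are already set instead of clobbering or duplicating them.
env = base.Clone(tools=['qt3'])
env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[])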
+""" + +import TestSCons + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation('qt') + +test.write(['qt', 'include', 'foo5.h'], """\ +#include +void +foo5(void) +{ +#ifdef FOO + printf("qt/include/foo5.h\\n"); +#endif +} +""") + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', """\ +Import("env") +env = env.Clone(tools=['qt3']) +env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[]) +""") + +test.write('main.cpp', r""" +#include "foo5.h" +int main(void) { foo5(); return 0; } +""") + +test.run(arguments="--warn=no-tool-qt-deprecated") + +test.run( + arguments='--warn=no-tool-qt-deprecated', + program=test.workpath('main' + TestSCons._exe), + stdout='qt/include/foo5.h\n', +) +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/source-from-ui.py scons-4.5.2+dfsg/test/QT/qt3/source-from-ui.py --- scons-4.4.0+dfsg/test/QT/qt3/source-from-ui.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/source-from-ui.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,161 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Create .cpp, .h, moc_....cpp from a .ui file. 
+""" + +import os.path + +import TestSCons + +test = TestSCons.TestSCons() + +test.Qt_dummy_installation() + +############################################################################## + +aaa_dll = TestSCons.dll_ + 'aaa' + TestSCons._dll +moc = 'moc_aaa.cc' +cpp = 'uic_aaa.cc' +obj = TestSCons.shobj_ + os.path.splitext(cpp)[0] + TestSCons._shobj +h = 'aaa.h' + +test.Qt_create_SConstruct('SConstruct') + +test.write('SConscript', """\ +Import("env dup") +if dup == 0: env.Append(CPPPATH=['#', '.']) +env.SharedLibrary(target = 'aaa', source = ['aaa.ui', 'useit.cpp']) +""") + +test.write('aaa.ui', r""" +#if defined (_WIN32) || defined(__CYGWIN__) +#define DLLEXPORT __declspec(dllexport) +#else +#define DLLEXPORT +#endif +DLLEXPORT void aaa(void) +""") + +test.write('useit.cpp', r""" +#include "aaa.h" +void useit() { + aaa(); +} +""") + +test.run(arguments=aaa_dll) + +test.up_to_date(options='-n', arguments=aaa_dll) + +test.write('aaa.ui', r""" +/* a change */ +#if defined (_WIN32) || defined(__CYGWIN__) +#define DLLEXPORT __declspec(dllexport) +#else +#define DLLEXPORT +#endif +DLLEXPORT void aaa(void) +""") + +test.not_up_to_date(options='-n', arguments=moc) +test.not_up_to_date(options='-n', arguments=cpp) +test.not_up_to_date(options='-n', arguments=h) + +test.run(arguments=" " + aaa_dll) + +test.write('aaa.ui', r""" +void aaa(void) +//aaa.ui.h +""") + +# test that non-existant ui.h files are ignored (as uic does) +test.run(arguments=" " + aaa_dll) + +test.write('aaa.ui.h', r""" +/* test dependency to .ui.h */ +""") + +test.run(arguments=" " + aaa_dll) + +test.write('aaa.ui.h', r""" +/* changed */ +""") + +test.not_up_to_date(options='-n', arguments=obj) +test.not_up_to_date(options='-n', arguments=cpp) +test.not_up_to_date(options='-n', arguments=h) +test.not_up_to_date(options='-n', arguments=moc) + +# clean up +test.run(arguments=" -c " + aaa_dll) + +test.run( + arguments="variant_dir=1 " + + test.workpath('build', aaa_dll) +) + +test.must_exist(test.workpath('build', moc)) +test.must_exist(test.workpath('build', cpp)) +test.must_exist(test.workpath('build', h)) +test.must_not_exist(test.workpath(moc)) +test.must_not_exist(test.workpath(cpp)) +test.must_not_exist(test.workpath(h)) + +cppContents = test.read(test.workpath('build', cpp), mode='r') +test.fail_test(cppContents.find('#include "aaa.ui.h"') == -1) + +test.run( + arguments="variant_dir=1 chdir=1 " + + test.workpath('build', aaa_dll) +) + +test.must_exist(test.workpath('build', moc)) +test.must_exist(test.workpath('build', cpp)) +test.must_exist(test.workpath('build', h)) +test.must_not_exist(test.workpath(moc)) +test.must_not_exist(test.workpath(cpp)) +test.must_not_exist(test.workpath(h)) + +test.run( + arguments=" variant_dir=1 chdir=1 dup=0 " + + test.workpath('build_dup0', aaa_dll) +) + +test.must_exist(test.workpath('build_dup0', moc)) +test.must_exist(test.workpath('build_dup0', cpp)) +test.must_exist(test.workpath('build_dup0', h)) +test.must_not_exist(test.workpath(moc)) +test.must_not_exist(test.workpath(cpp)) +test.must_not_exist(test.workpath(h)) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/Tool.py scons-4.5.2+dfsg/test/QT/qt3/Tool.py --- scons-4.4.0+dfsg/test/QT/qt3/Tool.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/Tool.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# 
Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Verify that applying env.Tool('qt3') after running Configure checks +works properly. This was broken in 0.96.95. + +The configuration here is a moderately stripped-down version of the +real-world configuration for lprof (lprof.sourceforge.net). It's probably +not completely minimal, but we're leaving it as-is since it represents a +good real-world sanity check on the interaction of some key subsystems. +""" + +import os + +import TestSCons + +test = TestSCons.TestSCons() + +if not os.environ.get('QTDIR', None): + x ="External environment variable $QTDIR not set; skipping test(s).\n" + test.skip_test(x) + +test.write('SConstruct', """ +import os + +def DoWithVariables(variables, prefix, what): + saved_variables = { } + for name in variables.keys(): + saved_variables[ name ] = env[ name ][:] + env[ name ].append(variables[ name ]) + + result = what() + + for name in saved_variables.keys(): + env[ name ] = saved_variables[ name ] + env[ prefix+name ] = variables[ name ] + + return result + +def CheckForQtAt(context, qtdir): + context.Message('Checking for Qt at %s... ' % qtdir) + libp = os.path.join(qtdir, 'lib') + cppp = os.path.join(qtdir, 'include') + result = AttemptLinkWithVariables(context, + { "LIBS": "qt-mt", "LIBPATH": libp , "CPPPATH": cppp }, + ''' +#include +int main(int argc, char **argv) { + QApplication qapp(argc, argv); + return 0; +} +''',".cpp","QT_") + context.Result(result) + return result + +def CheckForQt(context): + # list is currently POSIX centric - what happens with Windows? + potential_qt_dirs = [ + "/usr/share/qt3", # Debian unstable + "/usr/share/qt", + "/usr", + "/usr/local", + "/usr/lib/qt3", # Suse + "/usr/lib/qt", + "/usr/qt/3", # Gentoo + "/usr/pkg/qt3" # pkgsrc (NetBSD) + ] + + if 'QTDIR' in os.environ: + potential_qt_dirs.insert(0, os.environ['QTDIR']) + + if env[ 'qt_directory' ] != "/": + uic_path = os.path.join(env['qt_directory'], 'bin', 'uic') + if os.path.isfile(uic_path): + potential_qt_dirs.insert(0, env[ 'qt_directory' ]) + else: + print("QT not found. 
Invalid qt_directory value - failed to find uic.") + return 0 + + for i in potential_qt_dirs: + context.env.Replace(QT3DIR = i) + if CheckForQtAt(context, i): + # additional checks to validate QT installation + if not os.path.isfile(os.path.join(i, 'bin', 'uic')): + print("QT - failed to find uic.") + return 0 + if not os.path.isfile(os.path.join(i, 'bin', 'moc')): + print("QT - failed to find moc.") + return 0 + if not os.path.exists(os.path.join(i, 'lib')): + print("QT - failed to find QT lib path.") + return 0 + if not os.path.exists(os.path.join(i, 'include')): + print("QT - failed to find QT include path.") + return 0 + return 1 + else: + if i==env['qt_directory']: + print("QT directory not valid. Failed QT test build.") + return 0 + return 0 + +def AttemptLinkWithVariables(context, variables, code, extension, prefix): + return DoWithVariables(variables, prefix, + lambda: context.TryLink(code, extension)) + +env = Environment(CPPPATH=['.'], LIBPATH=['.'], LIBS=[]) + +opts = Variables('lprof.conf') +opts.Add(PathVariable("qt_directory", "Path to Qt directory", "/")) +opts.Update(env) + +env['QT3_LIB'] = 'qt-mt' +config = env.Configure(custom_tests = { + 'CheckForQt' : CheckForQt, +}) + +if not config.CheckForQt(): + print("Failed to find valid QT environment.") + Exit(1) + +env.Tool('qt3', ['$TOOL_PATH']) +""") + +test.run(arguments='.') + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt3/up-to-date.py scons-4.5.2+dfsg/test/QT/qt3/up-to-date.py --- scons-4.4.0+dfsg/test/QT/qt3/up-to-date.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt3/up-to-date.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +""" +Validate that a stripped-down real-world Qt configuation (thanks +to Leanid Nazdrynau) with a generated .h file is correctly +up-to-date after a build. + +(This catches a bug that was introduced during a signature refactoring +ca. September 2005.) 
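The verification idiom this test leans on, shown in isolation (editorial sketch, not a patch hunk; the target path mirrors the fixture below):

# Illustrative only: build once, then assert a second pass has nothing to do.
import TestSCons
test = TestSCons.TestSCons()
# ... fixture files written as in the test body below ...
target = 'layer/aclock/qt_bug/my' + TestSCons._obj
test.run(arguments='--warn=no-tool-qt-deprecated ' + target, stderr=None)
# '--debug=explain' makes SCons print *why* it would rebuild, so a signature
# regression on the generated header surfaces as a readable explanation
# rather than a silent spurious rebuild.
test.up_to_date(options='--debug=explain', arguments=target, stderr=None)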
+""" + +import os + +import TestSCons + +_obj = TestSCons._obj + +test = TestSCons.TestSCons() + +if not os.environ.get('QTDIR', None): + x ="External environment variable $QTDIR not set; skipping test(s).\n" + test.skip_test(x) + +test.subdir('layer', + ['layer', 'aclock'], + ['layer', 'aclock', 'qt_bug']) + +test.write('SConstruct', """\ +import os +aa=os.getcwd() + +env=Environment(tools=['default','expheaders','qt3'],toolpath=[aa]) +env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include') +if not os.access(env["EXP_HEADER_ABS"],os.F_OK): + os.mkdir (env["EXP_HEADER_ABS"]) +Export('env') +env.SConscript('layer/aclock/qt_bug/SConscript') +""") + +test.write('expheaders.py', """\ +import SCons.Defaults +def ExpHeaderScanner(node, env, path): + return [] +def generate(env): + HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0o755)]) + HeaderBuilder= SCons.Builder.Builder(action=HeaderAction) + env['BUILDERS']['ExportHeaders'] = HeaderBuilder +def exists(env): + return 0 +""") + +test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\ +import os + +Import ("env") +env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h') +env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h') +env.Append(CPPPATH=env["EXP_HEADER_ABS"]) +env.StaticLibrary('all',['main.ui','migraform.ui','my.cc']) +""") + +test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\ + +Main + + + Main + + + + 0 + 0 + 600 + 385 + + + + + migraform.h + + +""") + +test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\ + +MigrateForm + + + MigrateForm + + + + 0 + 0 + 600 + 385 + + + + +""") + +test.write(['layer', 'aclock', 'qt_bug', 'my.cc'], """\ +#include +""") + +my_obj = 'layer/aclock/qt_bug/my' + _obj + +test.run(arguments='--warn=no-tool-qt-deprecated ' + my_obj, stderr=None) + +expect = my_obj.replace('/', os.sep) +test.up_to_date(options='--debug=explain', arguments=expect, stderr=None) + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/QTFLAGS.py scons-4.5.2+dfsg/test/QT/QTFLAGS.py --- scons-4.4.0+dfsg/test/QT/QTFLAGS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/QTFLAGS.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,219 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -""" -Testing the configuration mechanisms of the 'qt' tool. -""" - -import TestSCons - -_python_ = TestSCons._python_ -_exe = TestSCons._exe - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation() -test.subdir('work1', 'work2') - -test.run( - chdir=test.workpath('qt', 'lib'), - arguments="--warn=no-tool-qt-deprecated .", - stderr=TestSCons.noisy_ar, - match=TestSCons.match_re_dotall, -) - -QT = test.workpath('qt') -QT_LIB = 'myqt' -QT_MOC = '%s %s' % (_python_, test.workpath('qt', 'bin', 'mymoc.py')) -QT_UIC = '%s %s' % (_python_, test.workpath('qt', 'bin', 'myuic.py')) - -def createSConstruct(test, place, overrides): - test.write(place, """\ -env = Environment( - tools=['default','qt'], - QTDIR = r'%s', - QT_LIB = r'%s', - QT_MOC = r'%s', - QT_UIC = r'%s', - %s # last because 'overrides' may add comma -) -if ARGUMENTS.get('variant_dir', 0): - if ARGUMENTS.get('chdir', 0): - SConscriptChdir(1) - else: - SConscriptChdir(0) - VariantDir('build', '.', duplicate=1) - sconscript = Dir('build').File('SConscript') -else: - sconscript = File('SConscript') -Export("env") -SConscript(sconscript) -""" % (QT, QT_LIB, QT_MOC, QT_UIC, overrides)) - - -createSConstruct(test, ['work1', 'SConstruct'], - """QT_UICIMPLFLAGS='-x', - QT_UICDECLFLAGS='-y', - QT_MOCFROMHFLAGS='-z', - QT_MOCFROMCXXFLAGS='-i -w', - QT_UICDECLPREFIX='uic-', - QT_UICDECLSUFFIX='.hpp', - QT_UICIMPLPREFIX='', - QT_UICIMPLSUFFIX='.cxx', - QT_MOCHPREFIX='mmm', - QT_MOCHSUFFIX='.cxx', - QT_MOCCXXPREFIX='moc', - QT_MOCCXXSUFFIX='.inl', - QT_UISUFFIX='.myui',""") -test.write(['work1', 'SConscript'],""" -Import("env") -env.Program('mytest', ['mocFromH.cpp', - 'mocFromCpp.cpp', - 'an_ui_file.myui', - 'another_ui_file.myui', - 'main.cpp']) -""") - -test.write(['work1', 'mocFromH.hpp'], """ -#include "my_qobject.h" -void mocFromH() Q_OBJECT -""") - -test.write(['work1', 'mocFromH.cpp'], """ -#include "mocFromH.hpp" -""") - -test.write(['work1', 'mocFromCpp.cpp'], """ -#include "my_qobject.h" -void mocFromCpp() Q_OBJECT -#include "mocmocFromCpp.inl" -""") - -test.write(['work1', 'an_ui_file.myui'], """ -void an_ui_file() -""") - -test.write(['work1', 'another_ui_file.myui'], """ -void another_ui_file() -""") - -test.write(['work1', 'another_ui_file.desc.hpp'], """ -/* just a dependency checker */ -""") - -test.write(['work1', 'main.cpp'], """ -#include "mocFromH.hpp" -#include "uic-an_ui_file.hpp" -#include "uic-another_ui_file.hpp" -void mocFromCpp(); - -int main(void) { - mocFromH(); - mocFromCpp(); - an_ui_file(); - another_ui_file(); -} -""") - -test.run(chdir='work1', arguments="--warn=no-tool-qt-deprecated mytest" + _exe) - -test.must_exist( - ['work1', 'mmmmocFromH.cxx'], - ['work1', 'mocmocFromCpp.inl'], - ['work1', 'an_ui_file.cxx'], - ['work1', 'uic-an_ui_file.hpp'], - ['work1', 'mmman_ui_file.cxx'], - ['work1', 'another_ui_file.cxx'], - ['work1', 'uic-another_ui_file.hpp'], - ['work1', 'mmmanother_ui_file.cxx'], -) - -def _flagTest(test,fileToContentsStart): - for f,c in fileToContentsStart.items(): - if test.read(test.workpath('work1', f), mode='r').find(c) != 0: - return 1 - return 0 - -test.fail_test( - _flagTest( - test, - { - 'mmmmocFromH.cxx': '/* mymoc.py -z */', - 'mocmocFromCpp.inl': '/* mymoc.py -w */', - 'an_ui_file.cxx': '/* myuic.py -x */', - 'uic-an_ui_file.hpp': '/* myuic.py -y */', - 'mmman_ui_file.cxx': '/* mymoc.py -z */', - }, - ) -) - -test.write(['work2', 'SConstruct'], """ -import os.path - -env1 = Environment( - tools=['qt'], - QTDIR=r'%(QTDIR)s', - QT_BINPATH='$QTDIR/bin64', - 
QT_LIBPATH='$QTDIR/lib64', - QT_CPPPATH='$QTDIR/h64', -) - -cpppath = env1.subst('$CPPPATH') -if os.path.normpath(cpppath) != os.path.join(r'%(QTDIR)s', 'h64'): - print(cpppath) - Exit(1) -libpath = env1.subst('$LIBPATH') -if os.path.normpath(libpath) != os.path.join(r'%(QTDIR)s', 'lib64'): - print(libpath) - Exit(2) -qt_moc = env1.subst('$QT_MOC') -if os.path.normpath(qt_moc) != os.path.join(r'%(QTDIR)s', 'bin64', 'moc'): - print(qt_moc) - Exit(3) - -env2 = Environment( - tools=['default', 'qt'], QTDIR=None, QT_LIB=None, QT_CPPPATH=None, QT_LIBPATH=None -) - -env2.Program('main.cpp') -""" % {'QTDIR':QT}) - -test.write(['work2', 'main.cpp'], """ -int main(void) { return 0; } -""") - -# Ignore stderr, because if Qt is not installed, -# there may be a warning about an empty QTDIR on stderr. -test.run(arguments="--warn=no-tool-qt-deprecated", chdir='work2', stderr=None) - -test.must_exist(['work2', 'main' + _exe]) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/qt_warnings.py scons-4.5.2+dfsg/test/QT/qt_warnings.py --- scons-4.4.0+dfsg/test/QT/qt_warnings.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/qt_warnings.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test the Qt tool warnings. 
-""" - -import os -import re - -import TestSCons - -test = TestSCons.TestSCons() - -SConstruct_path = test.workpath('SConstruct') - -test.Qt_dummy_installation() - -test.Qt_create_SConstruct(SConstruct_path) - -test.write('aaa.cpp', r""" -#include "my_qobject.h" -void aaa(void) Q_OBJECT -""") - -test.write('SConscript', r""" -Import("env") -import os -env.StaticLibrary('aaa.cpp') -""") - -test.run(arguments="--warn=no-tool-qt-deprecated", stderr=None) - -match12 = r""" -scons: warning: Generated moc file 'aaa.moc' is not included by 'aaa.cpp' -""" + TestSCons.file_expr - -if not re.search(match12, test.stderr()): - print("Did not find expected regular expression in stderr:") - print(test.stderr()) - test.fail_test() - -os.environ['QTDIR'] = test.QT - -test.run(arguments='--warn=no-tool-qt-deprecated -n noqtdir=1') - -# We'd like to eliminate $QTDIR from the environment as follows: -# del os.environ['QTDIR'] -# But unfortunately, in at least some versions of Python, the Environment -# class doesn't implement a __delitem__() method to make the library -# call to actually remove the deleted variable from the *external* -# environment, so it only gets removed from the Python dictionary. -# Consequently, we need to just wipe out its value as follows> -os.environ['QTDIR'] = '' -test.run(stderr=None, arguments='--warn=no-tool-qt-deprecated -n noqtdir=1') - -moc = test.where_is('moc') -if moc: - import os.path - qtdir = os.path.dirname(os.path.dirname(moc)) - qtdir = qtdir.replace('\\', '\\\\' ) - - expect = r""" -scons: warning: Could not detect qt, using moc executable as a hint \(QTDIR=%s\) -File "%s", line \d+, in (\?|) -""" % (qtdir, re.escape(SConstruct_path)) -else: - - expect = r""" -scons: warning: Could not detect qt, using empty QTDIR -File "%s", line \d+, in (\?|) -""" % re.escape(SConstruct_path) - -test.fail_test(not test.match_re(test.stderr(), expect)) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/reentrant.py scons-4.5.2+dfsg/test/QT/reentrant.py --- scons-4.4.0+dfsg/test/QT/reentrant.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/reentrant.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Test creation from a copied environment that already has QT variables. 
-This makes sure the tool initialization is re-entrant. -""" - -import TestSCons - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation('qt') - -test.write(['qt', 'include', 'foo5.h'], """\ -#include -void -foo5(void) -{ -#ifdef FOO - printf("qt/include/foo5.h\\n"); -#endif -} -""") - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """\ -Import("env") -env = env.Clone(tools=['qt']) -env.Program('main', 'main.cpp', CPPDEFINES=['FOO'], LIBS=[]) -""") - -test.write('main.cpp', r""" -#include "foo5.h" -int main(void) { foo5(); return 0; } -""") - -test.run(arguments="--warn=no-tool-qt-deprecated") - -test.run( - arguments='--warn=no-tool-qt-deprecated', - program=test.workpath('main' + TestSCons._exe), - stdout='qt/include/foo5.h\n', -) -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/source-from-ui.py scons-4.5.2+dfsg/test/QT/source-from-ui.py --- scons-4.4.0+dfsg/test/QT/source-from-ui.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/source-from-ui.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,161 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Create .cpp, .h, moc_....cpp from a .ui file. 
-""" - -import os.path - -import TestSCons - -test = TestSCons.TestSCons() - -test.Qt_dummy_installation() - -############################################################################## - -aaa_dll = TestSCons.dll_ + 'aaa' + TestSCons._dll -moc = 'moc_aaa.cc' -cpp = 'uic_aaa.cc' -obj = TestSCons.shobj_ + os.path.splitext(cpp)[0] + TestSCons._shobj -h = 'aaa.h' - -test.Qt_create_SConstruct('SConstruct') - -test.write('SConscript', """\ -Import("env dup") -if dup == 0: env.Append(CPPPATH=['#', '.']) -env.SharedLibrary(target = 'aaa', source = ['aaa.ui', 'useit.cpp']) -""") - -test.write('aaa.ui', r""" -#if defined (_WIN32) || defined(__CYGWIN__) -#define DLLEXPORT __declspec(dllexport) -#else -#define DLLEXPORT -#endif -DLLEXPORT void aaa(void) -""") - -test.write('useit.cpp', r""" -#include "aaa.h" -void useit() { - aaa(); -} -""") - -test.run(arguments="--warn=no-tool-qt-deprecated " + aaa_dll) - -test.up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=aaa_dll) - -test.write('aaa.ui', r""" -/* a change */ -#if defined (_WIN32) || defined(__CYGWIN__) -#define DLLEXPORT __declspec(dllexport) -#else -#define DLLEXPORT -#endif -DLLEXPORT void aaa(void) -""") - -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=moc) -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=cpp) -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=h) - -test.run(arguments="--warn=no-tool-qt-deprecated " + aaa_dll) - -test.write('aaa.ui', r""" -void aaa(void) -//aaa.ui.h -""") - -# test that non-existant ui.h files are ignored (as uic does) -test.run(arguments="--warn=no-tool-qt-deprecated " + aaa_dll) - -test.write('aaa.ui.h', r""" -/* test dependency to .ui.h */ -""") - -test.run(arguments="--warn=no-tool-qt-deprecated " + aaa_dll) - -test.write('aaa.ui.h', r""" -/* changed */ -""") - -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=obj) -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=cpp) -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=h) -test.not_up_to_date(options='--warn=no-tool-qt-deprecated -n', arguments=moc) - -# clean up -test.run(arguments="--warn=no-tool-qt-deprecated -c " + aaa_dll) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 " - + test.workpath('build', aaa_dll) -) - -test.must_exist(test.workpath('build', moc)) -test.must_exist(test.workpath('build', cpp)) -test.must_exist(test.workpath('build', h)) -test.must_not_exist(test.workpath(moc)) -test.must_not_exist(test.workpath(cpp)) -test.must_not_exist(test.workpath(h)) - -cppContents = test.read(test.workpath('build', cpp), mode='r') -test.fail_test(cppContents.find('#include "aaa.ui.h"') == -1) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 " - + test.workpath('build', aaa_dll) -) - -test.must_exist(test.workpath('build', moc)) -test.must_exist(test.workpath('build', cpp)) -test.must_exist(test.workpath('build', h)) -test.must_not_exist(test.workpath(moc)) -test.must_not_exist(test.workpath(cpp)) -test.must_not_exist(test.workpath(h)) - -test.run( - arguments="--warn=no-tool-qt-deprecated variant_dir=1 chdir=1 dup=0 " - + test.workpath('build_dup0', aaa_dll) -) - -test.must_exist(test.workpath('build_dup0', moc)) -test.must_exist(test.workpath('build_dup0', cpp)) -test.must_exist(test.workpath('build_dup0', h)) -test.must_not_exist(test.workpath(moc)) -test.must_not_exist(test.workpath(cpp)) -test.must_not_exist(test.workpath(h)) - 
-test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/Tool.py scons-4.5.2+dfsg/test/QT/Tool.py --- scons-4.4.0+dfsg/test/QT/Tool.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/Tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,156 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -""" -Verify that applying env.Tool('qt') after running Configure checks -works properly. This was broken in 0.96.95. - -The configuration here is a moderately stripped-down version of the -real-world configuration for lprof (lprof.sourceforge.net). It's probably -not completely minimal, but we're leaving it as-is since it represents a -good real-world sanity check on the interaction of some key subsystems. -""" - -import os - -import TestSCons - -test = TestSCons.TestSCons() - -if not os.environ.get('QTDIR', None): - x ="External environment variable $QTDIR not set; skipping test(s).\n" - test.skip_test(x) - -test.write('SConstruct', """ -import os - -def DoWithVariables(variables, prefix, what): - saved_variables = { } - for name in variables.keys(): - saved_variables[ name ] = env[ name ][:] - env[ name ].append(variables[ name ]) - - result = what() - - for name in saved_variables.keys(): - env[ name ] = saved_variables[ name ] - env[ prefix+name ] = variables[ name ] - - return result - -def CheckForQtAt(context, qtdir): - context.Message('Checking for Qt at %s... ' % qtdir) - libp = os.path.join(qtdir, 'lib') - cppp = os.path.join(qtdir, 'include') - result = AttemptLinkWithVariables(context, - { "LIBS": "qt-mt", "LIBPATH": libp , "CPPPATH": cppp }, - ''' -#include -int main(int argc, char **argv) { - QApplication qapp(argc, argv); - return 0; -} -''',".cpp","QT_") - context.Result(result) - return result - -def CheckForQt(context): - # list is currently POSIX centric - what happens with Windows? 
- potential_qt_dirs = [ - "/usr/share/qt3", # Debian unstable - "/usr/share/qt", - "/usr", - "/usr/local", - "/usr/lib/qt3", # Suse - "/usr/lib/qt", - "/usr/qt/3", # Gentoo - "/usr/pkg/qt3" # pkgsrc (NetBSD) - ] - - if 'QTDIR' in os.environ: - potential_qt_dirs.insert(0, os.environ[ 'QTDIR' ]) - - if env[ 'qt_directory' ] != "/": - uic_path = os.path.join(env['qt_directory'], 'bin', 'uic') - if os.path.isfile(uic_path): - potential_qt_dirs.insert(0, env[ 'qt_directory' ]) - else: - print("QT not found. Invalid qt_directory value - failed to find uic.") - return 0 - - for i in potential_qt_dirs: - context.env.Replace(QTDIR = i) - if CheckForQtAt(context, i): - # additional checks to validate QT installation - if not os.path.isfile(os.path.join(i, 'bin', 'uic')): - print("QT - failed to find uic.") - return 0 - if not os.path.isfile(os.path.join(i, 'bin', 'moc')): - print("QT - failed to find moc.") - return 0 - if not os.path.exists(os.path.join(i, 'lib')): - print("QT - failed to find QT lib path.") - return 0 - if not os.path.exists(os.path.join(i, 'include')): - print("QT - failed to find QT include path.") - return 0 - return 1 - else: - if i==env['qt_directory']: - print("QT directory not valid. Failed QT test build.") - return 0 - return 0 - -def AttemptLinkWithVariables(context, variables, code, extension, prefix): - return DoWithVariables(variables, prefix, - lambda: context.TryLink(code, extension)) - -env = Environment(CPPPATH=['.'], LIBPATH=['.'], LIBS=[]) - -opts = Variables('lprof.conf') -opts.Add(PathVariable("qt_directory", "Path to Qt directory", "/")) -opts.Update(env) - -env['QT_LIB'] = 'qt-mt' -config = env.Configure(custom_tests = { - 'CheckForQt' : CheckForQt, -}) - -if not config.CheckForQt(): - print("Failed to find valid QT environment.") - Exit(1) - -env.Tool('qt', ['$TOOL_PATH']) -""") - -test.run(arguments='--warn=no-tool-qt-deprecated .') - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/QT/up-to-date.py scons-4.5.2+dfsg/test/QT/up-to-date.py --- scons-4.4.0+dfsg/test/QT/up-to-date.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/QT/up-to-date.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# -# MIT License -# -# Copyright The SCons Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -""" -Validate that a stripped-down real-world Qt configuation (thanks -to Leanid Nazdrynau) with a generated .h file is correctly -up-to-date after a build. - -(This catches a bug that was introduced during a signature refactoring -ca. September 2005.) -""" - -import os - -import TestSCons - -_obj = TestSCons._obj - -test = TestSCons.TestSCons() - -if not os.environ.get('QTDIR', None): - x ="External environment variable $QTDIR not set; skipping test(s).\n" - test.skip_test(x) - -test.subdir('layer', - ['layer', 'aclock'], - ['layer', 'aclock', 'qt_bug']) - -test.write('SConstruct', """\ -import os -aa=os.getcwd() - -env=Environment(tools=['default','expheaders','qt'],toolpath=[aa]) -env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include') -if not os.access(env["EXP_HEADER_ABS"],os.F_OK): - os.mkdir (env["EXP_HEADER_ABS"]) -Export('env') -env.SConscript('layer/aclock/qt_bug/SConscript') -""") - -test.write('expheaders.py', """\ -import SCons.Defaults -def ExpHeaderScanner(node, env, path): - return [] -def generate(env): - HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0o755)]) - HeaderBuilder= SCons.Builder.Builder(action=HeaderAction) - env['BUILDERS']['ExportHeaders'] = HeaderBuilder -def exists(env): - return 0 -""") - -test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\ -import os - -Import ("env") -env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h') -env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h') -env.Append(CPPPATH=env["EXP_HEADER_ABS"]) -env.StaticLibrary('all',['main.ui','migraform.ui','my.cc']) -""") - -test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\ - -Main - - - Main - - - - 0 - 0 - 600 - 385 - - - - - migraform.h - - -""") - -test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\ - -MigrateForm - - - MigrateForm - - - - 0 - 0 - 600 - 385 - - - - -""") - -test.write(['layer', 'aclock', 'qt_bug', 'my.cc'], """\ -#include -""") - -my_obj = 'layer/aclock/qt_bug/my' + _obj - -test.run(arguments='--warn=no-tool-qt-deprecated ' + my_obj, stderr=None) - -expect = my_obj.replace('/', os.sep) -test.up_to_date(options='--debug=explain', arguments=expect, stderr=None) - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/rebuild-generated.py scons-4.5.2+dfsg/test/rebuild-generated.py --- scons-4.4.0+dfsg/test/rebuild-generated.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/rebuild-generated.py 2023-03-21 16:17:04.000000000 +0000 @@ -83,7 +83,7 @@ kernelDefines = env.Command("header.hh", "header.hh.in", Copy('$TARGET', '$SOURCE')) kernelImporterSource = env.Command("generated.cc", ["%s"], "%s") kernelImporter = env.Program(kernelImporterSource + ["main.cc"]) -kernelImports = env.Command("KernelImport.hh", kernelImporter, ".%s$SOURCE > $TARGET") +kernelImports = env.Command("KernelImport.hh", kernelImporter, r".%s$SOURCE > $TARGET") osLinuxModule = env.StaticObject(["target.cc"]) """ % (generator_name, kernel_action, sep)) diff -Nru scons-4.4.0+dfsg/test/Repository/include.py scons-4.5.2+dfsg/test/Repository/include.py --- scons-4.4.0+dfsg/test/Repository/include.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/include.py 2023-03-21 16:17:04.000000000 +0000 @@ -37,6 +37,7 @@ test.write(['work', 'SConstruct'], """ Repository(r'%s') +DefaultEnvironment(tools=[]) # test speedup env = 
Environment(CPPPATH = ['.']) env.Program(target = 'foo', source = 'foo.c') """ % repository) diff -Nru scons-4.4.0+dfsg/test/Repository/JavaH.py scons-4.5.2+dfsg/test/Repository/JavaH.py --- scons-4.4.0+dfsg/test/Repository/JavaH.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/JavaH.py 2023-03-21 16:17:04.000000000 +0000 @@ -79,6 +79,7 @@ # test.write(['rep1', 'SConstruct'], """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools=['javac', 'javah'], JAVAC=r'"%s"', JAVAH=r'"%s"') classes = env.Java(target='classes', source='src') env.JavaH(target='outdir', source=classes) diff -Nru scons-4.4.0+dfsg/test/Repository/Java.py scons-4.5.2+dfsg/test/Repository/Java.py --- scons-4.4.0+dfsg/test/Repository/Java.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/Java.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test building Java applications when using Repositories. @@ -58,6 +57,7 @@ # test.write(['rep1', 'SConstruct'], """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools = ['javac'], JAVAC = r'"%s"') env.Java(target = 'classes', source = 'src') diff -Nru scons-4.4.0+dfsg/test/Repository/Program.py scons-4.5.2+dfsg/test/Repository/Program.py --- scons-4.4.0+dfsg/test/Repository/Program.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/Program.py 2023-03-21 16:17:04.000000000 +0000 @@ -36,7 +36,7 @@ work1_foo_c = test.workpath('work1', 'foo.c') test.write(['work1', 'SConstruct'], r""" -DefaultEnvironment(tools=[]) +DefaultEnvironment(tools=[]) # test speedup Repository(r'%s') env = Environment(IMPLICIT_COMMAND_DEPENDENCIES=%s) env.Program(target= 'foo', source = Split('aaa.c bbb.c foo.c')) @@ -179,7 +179,7 @@ work2_foo = test.workpath('work2', 'foo' + _exe) test.write(['work2', 'SConstruct'], r""" -DefaultEnvironment(tools=[]) +DefaultEnvironment(tools=[]) # test speedup Repository(r'%s') Repository(r'%s') env = Environment() diff -Nru scons-4.4.0+dfsg/test/Repository/RMIC.py scons-4.5.2+dfsg/test/Repository/RMIC.py --- scons-4.4.0+dfsg/test/Repository/RMIC.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/RMIC.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test building Java applications when using Repositories. 
@@ -92,6 +91,7 @@ # test.write(['rep1', 'SConstruct'], """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools = ['javac', 'rmic'], JAVAC = r'"%s"', RMIC = r'"%s"') @@ -360,6 +360,7 @@ # test.write(['work3', 'SConstruct'], """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools = ['javac', 'rmic'], JAVAC = r'"%s"', RMIC = r'"%s"') diff -Nru scons-4.4.0+dfsg/test/Repository/StaticLibrary.py scons-4.5.2+dfsg/test/Repository/StaticLibrary.py --- scons-4.4.0+dfsg/test/Repository/StaticLibrary.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/StaticLibrary.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os.path import TestSCons @@ -54,6 +53,8 @@ # test.write(['repository', 'SConstruct'], """ + +DefaultEnvironment(tools=[]) # test speedup env = Environment(LIBS = ['xxx'], LIBPATH = '.', IMPLICIT_COMMAND_DEPENDENCIES=%s) env.Library(target = 'xxx', source = ['aaa.c', 'bbb.c']) diff -Nru scons-4.4.0+dfsg/test/Repository/VariantDir.py scons-4.5.2+dfsg/test/Repository/VariantDir.py --- scons-4.4.0+dfsg/test/Repository/VariantDir.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/VariantDir.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os.path @@ -38,6 +37,7 @@ # test.write(['repository', 'SConstruct'], r""" +DefaultEnvironment(tools=[]) # test speedup VariantDir('build0', 'src', duplicate=0) VariantDir('build1', 'src', duplicate=1) SConscript('build0/SConscript') diff -Nru scons-4.4.0+dfsg/test/Repository/variants.py scons-4.5.2+dfsg/test/Repository/variants.py --- scons-4.4.0+dfsg/test/Repository/variants.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Repository/variants.py 2023-03-21 16:17:04.000000000 +0000 @@ -77,6 +77,7 @@ test.write(['repository', 'SConstruct'], r""" OS = ARGUMENTS.get('OS', '') build1_os = "#build1/" + OS +DefaultEnvironment(tools=[]) # test speedup default = Environment() ccflags = { '': '', diff -Nru scons-4.4.0+dfsg/test/runtest/python.py scons-4.5.2+dfsg/test/runtest/python.py --- scons-4.4.0+dfsg/test/runtest/python.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/runtest/python.py 2023-03-21 16:17:04.000000000 +0000 @@ -50,20 +50,12 @@ # getting called with "/bin/../bin/python" as first argument, e.g. Fedora 17 Desktop. 
mypython = os.path.normpath(os.path.join(head, dir, os.path.pardir, dir, python)) -def escape(s): - return s.replace('\\', '\\\\') - -if re.search(r'\s', mypython): - mypythonstring = '"%s"' % escape(mypython) -else: - mypythonstring = escape(mypython) - test.subdir('test') test.write_passing_test(['test', 'pass.py']) expect_stdout = """\ -%(mypythonstring)s%(pythonflags)s %(test_pass_py)s +%(mypython)s%(pythonflags)s %(test_pass_py)s PASSING TEST STDOUT """ % locals() diff -Nru scons-4.4.0+dfsg/test/sconsign/script/Configure.py scons-4.5.2+dfsg/test/sconsign/script/Configure.py --- scons-4.4.0+dfsg/test/sconsign/script/Configure.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/sconsign/script/Configure.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify that we can print .sconsign files with Configure context @@ -58,6 +57,9 @@ test.write('SConstruct', """ import os + +DefaultEnvironment(tools=[]) + env = Environment(ENV={'PATH' : os.environ.get('PATH','')}) conf = Configure(env) r1 = conf.CheckCHeader( 'math.h' ) @@ -84,10 +86,10 @@ %(sig_re)s \[.*\] conftest_%(sig_re)s_0_%(sig_re)s%(_obj)s: %(_sconf_temp_conftest_0_c)s: %(sig_re)s \d+ \d+ - %(CC)s: %(sig_re)s \d+ \d+ + %(CC)s: %(sig_re)s None None %(sig_re)s \[.*\] === %(CC_dir)s: -%(CC_file)s: %(sig_re)s \d+ \d+ +%(CC_file)s: None None None """ % locals() # grab .sconsign or .sconsign_ diff -Nru scons-4.4.0+dfsg/test/TAR/TAR.py scons-4.5.2+dfsg/test/TAR/TAR.py --- scons-4.4.0+dfsg/test/TAR/TAR.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/TAR/TAR.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os @@ -58,6 +57,7 @@ """) test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment(tools = ['tar'], TAR = r'%(_python_)s mytar.py') env.Tar(target = 'aaa.tar', source = ['file1', 'file2']) env.Tar(target = 'aaa.tar', source = 'file3') @@ -89,6 +89,7 @@ test.file_fixture('wrapper.py') test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup foo = Environment() tar = foo.Dictionary('TAR') bar = Environment(TAR = r'%(_python_)s wrapper.py ' + tar) diff -Nru scons-4.4.0+dfsg/test/TARGETS.py scons-4.5.2+dfsg/test/TARGETS.py --- scons-4.4.0+dfsg/test/TARGETS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/TARGETS.py 2023-03-21 16:17:04.000000000 +0000 @@ -66,6 +66,7 @@ test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() print(list(map(str, DEFAULT_TARGETS))) print(list(map(str, BUILD_TARGETS))) diff -Nru scons-4.4.0+dfsg/test/TEX/biber_biblatex2.py scons-4.5.2+dfsg/test/TEX/biber_biblatex2.py --- scons-4.4.0+dfsg/test/TEX/biber_biblatex2.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/TEX/biber_biblatex2.py 2023-03-21 16:17:04.000000000 +0000 @@ -72,7 +72,7 @@ """ test.write(['ref.bib'],sources_bib_content % '2013' ) -test.write(['bibertest.tex'],r""" +sources_tex_content = r""" \documentclass{article} \usepackage{biblatex} @@ -80,13 +80,14 @@ \begin{document} -Hello. This is boring. +Hello. This is %s boring. \cite{mybook} And even more boring. \printbibliography \end{document} -""") +""" +test.write(['bibertest.tex'], sources_tex_content % "") test.run() @@ -110,7 +111,17 @@ pdf_output_1 = test.read('bibertest.pdf') +# Change tex, but don't change bib. In this case, pdf should still be rebuilt. +test.write(['bibertest.tex'], sources_tex_content % "very") +test.run() +pdf_output_1a = test.read('bibertest.pdf') +# If the PDF file is the same as it was previously, then it didn't +# pick up the change in the tex file, so fail. +test.fail_test(pdf_output_1 == pdf_output_1a) + +# Change bib. +test.write(['bibertest.tex'], sources_tex_content % "") test.write(['ref.bib'],sources_bib_content % '1982') test.run() diff -Nru scons-4.4.0+dfsg/test/TEX/bibtex-latex-rerun.py scons-4.5.2+dfsg/test/TEX/bibtex-latex-rerun.py --- scons-4.4.0+dfsg/test/TEX/bibtex-latex-rerun.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/TEX/bibtex-latex-rerun.py 2023-03-21 16:17:04.000000000 +0000 @@ -47,14 +47,14 @@ env.PDF( 'bibtest.tex' ) """) -test.write(['bibtest.tex'], r""" +sources_tex_content = r""" \documentclass{article} \begin{document} -Learn about cool math in \cite{koblitz:elliptic_curves}. +Learn about %s cool math in \cite{koblitz:elliptic_curves}. \bibliographystyle{alpha} \bibliography{sources} \end{document} -""") +""" sources_bib_content = r""" @book{koblitz:elliptic_curves, @@ -67,14 +67,24 @@ +test.write(['bibtest.tex'], sources_tex_content % "") test.write('sources.bib', sources_bib_content % '1981') test.run() pdf_output_1 = test.read('bibtest.pdf') +# Change tex, but don't change bib. In this case, pdf should still be rebuilt. +test.write(['bibtest.tex'], sources_tex_content % "really") +test.run() +pdf_output_1a = test.read('bibtest.pdf') +# If the PDF file is the same as it was previously, then it didn't +# pick up the change in the tex file, so fail. +test.fail_test(pdf_output_1 == pdf_output_1a) +# Change bib. 
+test.write(['bibtest.tex'], sources_tex_content % "") test.write('sources.bib', sources_bib_content % '1982') test.run() diff -Nru scons-4.4.0+dfsg/test/TEX/newglossary.py scons-4.5.2+dfsg/test/TEX/newglossary.py --- scons-4.4.0+dfsg/test/TEX/newglossary.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/TEX/newglossary.py 2023-03-21 16:17:04.000000000 +0000 @@ -87,12 +87,12 @@ a definition \gls{defPower} -\glossarystyle{index} +\setglossarystyle{index} \printglossary[type=symbol] \printglossary[type=acronym] \printglossary[type=main] \printglossary[type=definition] -\glossarystyle{super} +\setglossarystyle{super} \end{document}""") diff -Nru scons-4.4.0+dfsg/test/textfile/fixture/SConstruct scons-4.5.2+dfsg/test/textfile/fixture/SConstruct --- scons-4.4.0+dfsg/test/textfile/fixture/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/textfile/fixture/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -2,7 +2,8 @@ env = Environment(tools=['textfile']) data0 = ['Goethe', 'Schiller'] -data = ['lalala', 42, data0, 'tanteratei'] +data = ['lalala', 42, data0, 'tanteratei', + '×'] # <-- this is unicode /xd7 symbol env.Textfile('foo1', data) env.Textfile('foo2', data, LINESEPARATOR='|*') diff -Nru scons-4.4.0+dfsg/test/textfile/textfile.py scons-4.5.2+dfsg/test/textfile/textfile.py --- scons-4.4.0+dfsg/test/textfile/textfile.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/textfile/textfile.py 2023-03-21 16:17:04.000000000 +0000 @@ -34,7 +34,8 @@ # foo1a = test.workpath('foo1a.txt') # foo2a = test.workpath('foo2a.txt') -match_mode = 'r' +# Must be read binary as now we're including unicode characters in our textparts +match_mode = 'rb' test.file_fixture('fixture/SConstruct', 'SConstruct') @@ -44,7 +45,8 @@ textparts = ['lalala', '42', 'Goethe', 'Schiller', - 'tanteratei'] + 'tanteratei', + '×'] # <-- this is unicode /xd7 symbol foo1Text = linesep.join(textparts) foo2Text = '|*'.join(textparts) foo1aText = foo1Text + linesep diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_1.py scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2_2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2_2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_1.py 
scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/subdir1/Toolpath_TestTool1_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1_2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1_2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool1.py scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/Libs/tools_example/Toolpath_TestTool2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/SConstruct scons-4.5.2+dfsg/test/toolpath/nested/image/SConstruct --- scons-4.4.0+dfsg/test/toolpath/nested/image/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -1,69 +1,69 @@ -import sys, os - -toollist = ['Toolpath_TestTool1', - 'Toolpath_TestTool2', - 'subdir1.Toolpath_TestTool1_1', - 'subdir1.Toolpath_TestTool1_2', - 'subdir1.subdir2.Toolpath_TestTool2_1', - 'subdir1.subdir2.Toolpath_TestTool2_2', - ] - -print('Test where tools are located under site_scons/site_tools') -env1 = Environment(tools=toollist) -print("env1['Toolpath_TestTool1'] = %s"%env1.get('Toolpath_TestTool1')) -print("env1['Toolpath_TestTool2'] = %s"%env1.get('Toolpath_TestTool2')) -print("env1['Toolpath_TestTool1_1'] = %s"%env1.get('Toolpath_TestTool1_1')) -print("env1['Toolpath_TestTool1_2'] = %s"%env1.get('Toolpath_TestTool1_2')) -print("env1['Toolpath_TestTool2_1'] = %s"%env1.get('Toolpath_TestTool2_1')) 
-print("env1['Toolpath_TestTool2_2'] = %s"%env1.get('Toolpath_TestTool2_2')) - -print('Test where toolpath is set in the env constructor') -env2 = Environment(tools=toollist, toolpath=['Libs/tools_example']) -print("env2['Toolpath_TestTool1'] = %s"%env2.get('Toolpath_TestTool1')) -print("env2['Toolpath_TestTool2'] = %s"%env2.get('Toolpath_TestTool2')) -print("env2['Toolpath_TestTool1_1'] = %s"%env2.get('Toolpath_TestTool1_1')) -print("env2['Toolpath_TestTool1_2'] = %s"%env2.get('Toolpath_TestTool1_2')) -print("env2['Toolpath_TestTool2_1'] = %s"%env2.get('Toolpath_TestTool2_1')) -print("env2['Toolpath_TestTool2_2'] = %s"%env2.get('Toolpath_TestTool2_2')) - -print('Test a Clone') -base = Environment(tools=[], toolpath=['Libs/tools_example']) -derived = base.Clone(tools=['subdir1.Toolpath_TestTool1_1']) -print("derived['Toolpath_TestTool1_1'] = %s"%derived.get('Toolpath_TestTool1_1')) - - -print('Test using syspath as the toolpath') -print('Lets pretend that tools_example within Libs is actually a module installed via pip') -oldsyspath = sys.path -dir_path = Dir('.').srcnode().abspath -dir_path = os.path.join(dir_path, 'Libs') -sys.path.append(dir_path) - -searchpaths = [] -for item in sys.path: - if os.path.isdir(item): searchpaths.append(item) - -toollist = ['tools_example.Toolpath_TestTool1', - 'tools_example.Toolpath_TestTool2', - 'tools_example.subdir1.Toolpath_TestTool1_1', - 'tools_example.subdir1.Toolpath_TestTool1_2', - 'tools_example.subdir1.subdir2.Toolpath_TestTool2_1', - 'tools_example.subdir1.subdir2.Toolpath_TestTool2_2', - ] - -env3 = Environment(tools=toollist, toolpath=searchpaths) -print("env3['Toolpath_TestTool1'] = %s"%env3.get('Toolpath_TestTool1')) -print("env3['Toolpath_TestTool2'] = %s"%env3.get('Toolpath_TestTool2')) -print("env3['Toolpath_TestTool1_1'] = %s"%env3.get('Toolpath_TestTool1_1')) -print("env3['Toolpath_TestTool1_2'] = %s"%env3.get('Toolpath_TestTool1_2')) -print("env3['Toolpath_TestTool2_1'] = %s"%env3.get('Toolpath_TestTool2_1')) -print("env3['Toolpath_TestTool2_2'] = %s"%env3.get('Toolpath_TestTool2_2')) - - -print('Test using PyPackageDir') -toollist = ['Toolpath_TestTool2_1', 'Toolpath_TestTool2_2'] -env4 = Environment(tools = toollist, toolpath = [PyPackageDir('tools_example.subdir1.subdir2')]) -print("env4['Toolpath_TestTool2_1'] = %s"%env4.get('Toolpath_TestTool2_1')) -print("env4['Toolpath_TestTool2_2'] = %s"%env4.get('Toolpath_TestTool2_2')) - -sys.path = oldsyspath +import sys, os + +toollist = ['Toolpath_TestTool1', + 'Toolpath_TestTool2', + 'subdir1.Toolpath_TestTool1_1', + 'subdir1.Toolpath_TestTool1_2', + 'subdir1.subdir2.Toolpath_TestTool2_1', + 'subdir1.subdir2.Toolpath_TestTool2_2', + ] + +print('Test where tools are located under site_scons/site_tools') +env1 = Environment(tools=toollist) +print("env1['Toolpath_TestTool1'] = %s"%env1.get('Toolpath_TestTool1')) +print("env1['Toolpath_TestTool2'] = %s"%env1.get('Toolpath_TestTool2')) +print("env1['Toolpath_TestTool1_1'] = %s"%env1.get('Toolpath_TestTool1_1')) +print("env1['Toolpath_TestTool1_2'] = %s"%env1.get('Toolpath_TestTool1_2')) +print("env1['Toolpath_TestTool2_1'] = %s"%env1.get('Toolpath_TestTool2_1')) +print("env1['Toolpath_TestTool2_2'] = %s"%env1.get('Toolpath_TestTool2_2')) + +print('Test where toolpath is set in the env constructor') +env2 = Environment(tools=toollist, toolpath=['Libs/tools_example']) +print("env2['Toolpath_TestTool1'] = %s"%env2.get('Toolpath_TestTool1')) +print("env2['Toolpath_TestTool2'] = %s"%env2.get('Toolpath_TestTool2')) 
+print("env2['Toolpath_TestTool1_1'] = %s"%env2.get('Toolpath_TestTool1_1')) +print("env2['Toolpath_TestTool1_2'] = %s"%env2.get('Toolpath_TestTool1_2')) +print("env2['Toolpath_TestTool2_1'] = %s"%env2.get('Toolpath_TestTool2_1')) +print("env2['Toolpath_TestTool2_2'] = %s"%env2.get('Toolpath_TestTool2_2')) + +print('Test a Clone') +base = Environment(tools=[], toolpath=['Libs/tools_example']) +derived = base.Clone(tools=['subdir1.Toolpath_TestTool1_1']) +print("derived['Toolpath_TestTool1_1'] = %s"%derived.get('Toolpath_TestTool1_1')) + + +print('Test using syspath as the toolpath') +print('Lets pretend that tools_example within Libs is actually a module installed via pip') +oldsyspath = sys.path +dir_path = Dir('.').srcnode().abspath +dir_path = os.path.join(dir_path, 'Libs') +sys.path.append(dir_path) + +searchpaths = [] +for item in sys.path: + if os.path.isdir(item): searchpaths.append(item) + +toollist = ['tools_example.Toolpath_TestTool1', + 'tools_example.Toolpath_TestTool2', + 'tools_example.subdir1.Toolpath_TestTool1_1', + 'tools_example.subdir1.Toolpath_TestTool1_2', + 'tools_example.subdir1.subdir2.Toolpath_TestTool2_1', + 'tools_example.subdir1.subdir2.Toolpath_TestTool2_2', + ] + +env3 = Environment(tools=toollist, toolpath=searchpaths) +print("env3['Toolpath_TestTool1'] = %s"%env3.get('Toolpath_TestTool1')) +print("env3['Toolpath_TestTool2'] = %s"%env3.get('Toolpath_TestTool2')) +print("env3['Toolpath_TestTool1_1'] = %s"%env3.get('Toolpath_TestTool1_1')) +print("env3['Toolpath_TestTool1_2'] = %s"%env3.get('Toolpath_TestTool1_2')) +print("env3['Toolpath_TestTool2_1'] = %s"%env3.get('Toolpath_TestTool2_1')) +print("env3['Toolpath_TestTool2_2'] = %s"%env3.get('Toolpath_TestTool2_2')) + + +print('Test using PyPackageDir') +toollist = ['Toolpath_TestTool2_1', 'Toolpath_TestTool2_2'] +env4 = Environment(tools = toollist, toolpath = [PyPackageDir('tools_example.subdir1.subdir2')]) +print("env4['Toolpath_TestTool2_1'] = %s"%env4.get('Toolpath_TestTool2_1')) +print("env4['Toolpath_TestTool2_2'] = %s"%env4.get('Toolpath_TestTool2_2')) + +sys.path = oldsyspath diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_1.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/subdir2/Toolpath_TestTool2_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2_2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2_2'] = 1 +def exists(env): + return 1 diff -Nru 
scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_1.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/subdir1/Toolpath_TestTool1_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1_2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1_2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool1.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool1.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool1'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool2/__init__.py scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/nested/image/site_scons/site_tools/Toolpath_TestTool2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['Toolpath_TestTool2'] = 1 -def exists(env): - return 1 +def generate(env): + env['Toolpath_TestTool2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/SConstruct scons-4.5.2+dfsg/test/toolpath/relative_import/image/SConstruct --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/SConstruct 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/SConstruct 2023-03-21 16:17:04.000000000 +0000 @@ -1,10 +1,10 @@ -env = Environment(tools=['TestTool1', 'TestTool1.TestTool1_2'], toolpath=['tools']) - -# Test a relative import within the root of the tools directory -print("env['TestTool1'] = %s"%env.get('TestTool1')) -print("env['TestTool1_1'] = %s"%env.get('TestTool1_1')) - -# Test a relative import within a sub dir -print("env['TestTool1_2'] = %s"%env.get('TestTool1_2')) -print("env['TestTool1_2_1'] = %s"%env.get('TestTool1_2_1')) -print("env['TestTool1_2_2'] = %s"%env.get('TestTool1_2_2')) +env = Environment(tools=['TestTool1', 'TestTool1.TestTool1_2'], 
toolpath=['tools']) + +# Test a relative import within the root of the tools directory +print("env['TestTool1'] = %s"%env.get('TestTool1')) +print("env['TestTool1_1'] = %s"%env.get('TestTool1_1')) + +# Test a relative import within a sub dir +print("env['TestTool1_2'] = %s"%env.get('TestTool1_2')) +print("env['TestTool1_2_1'] = %s"%env.get('TestTool1_2_1')) +print("env['TestTool1_2_2'] = %s"%env.get('TestTool1_2_2')) diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/__init__.py scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/__init__.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,9 +1,9 @@ -from . import TestTool1_1 - -def generate(env): - env['TestTool1'] = 1 - # Include another tool within the same directory - TestTool1_1.generate(env) -def exists(env): - TestTool1_1.exists(env) - return 1 +from . import TestTool1_1 + +def generate(env): + env['TestTool1'] = 1 + # Include another tool within the same directory + TestTool1_1.generate(env) +def exists(env): + TestTool1_1.exists(env) + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_1.py scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_1.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['TestTool1_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['TestTool1_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/__init__.py scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,11 +1,11 @@ -from . import TestTool1_2_1 -from . import TestTool1_2_2 - -def generate(env): - env['TestTool1_2'] = 1 - TestTool1_2_1.generate(env) - TestTool1_2_2.generate(env) -def exists(env): - TestTool1_2_1.exists(env) - TestTool1_2_2.exists(env) - return 1 +from . import TestTool1_2_1 +from . 
import TestTool1_2_2 + +def generate(env): + env['TestTool1_2'] = 1 + TestTool1_2_1.generate(env) + TestTool1_2_2.generate(env) +def exists(env): + TestTool1_2_1.exists(env) + TestTool1_2_2.exists(env) + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_1.py scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_1.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_1.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_1.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['TestTool1_2_1'] = 1 -def exists(env): - return 1 +def generate(env): + env['TestTool1_2_1'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_2/__init__.py scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_2/__init__.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_2/__init__.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/image/tools/TestTool1/TestTool1_2/TestTool1_2_2/__init__.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,4 +1,4 @@ -def generate(env): - env['TestTool1_2_2'] = 1 -def exists(env): - return 1 +def generate(env): + env['TestTool1_2_2'] = 1 +def exists(env): + return 1 diff -Nru scons-4.4.0+dfsg/test/toolpath/relative_import/relative_import.py scons-4.5.2+dfsg/test/toolpath/relative_import/relative_import.py --- scons-4.4.0+dfsg/test/toolpath/relative_import/relative_import.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/toolpath/relative_import/relative_import.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,52 +1,52 @@ -#!/usr/bin/env python -# -# __COPYRIGHT__ -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY -# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - -import TestSCons - -test = TestSCons.TestSCons() - -test.dir_fixture('image') - -test.run(arguments = '.', stdout = """\ -scons: Reading SConscript files ... -env['TestTool1'] = 1 -env['TestTool1_1'] = 1 -env['TestTool1_2'] = 1 -env['TestTool1_2_1'] = 1 -env['TestTool1_2_2'] = 1 -scons: done reading SConscript files. -scons: Building targets ... -scons: `.' is up to date. -scons: done building targets. 
-""") - -test.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: +#!/usr/bin/env python +# +# __COPYRIGHT__ +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" + +import TestSCons + +test = TestSCons.TestSCons() + +test.dir_fixture('image') + +test.run(arguments = '.', stdout = """\ +scons: Reading SConscript files ... +env['TestTool1'] = 1 +env['TestTool1_1'] = 1 +env['TestTool1_2'] = 1 +env['TestTool1_2_1'] = 1 +env['TestTool1_2_2'] = 1 +scons: done reading SConscript files. +scons: Building targets ... +scons: `.' is up to date. +scons: done building targets. +""") + +test.pass_test() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/ValidateOptions.py scons-4.5.2+dfsg/test/ValidateOptions.py --- scons-4.4.0+dfsg/test/ValidateOptions.py 1970-01-01 00:00:00.000000000 +0000 +++ scons-4.5.2+dfsg/test/ValidateOptions.py 2023-03-21 16:17:04.000000000 +0000 @@ -0,0 +1,68 @@ +# MIT License +# +# Copyright The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +""" +Test ValidateOptions(). 
+""" + +import TestSCons + +test = TestSCons.TestSCons() +test.file_fixture('fixture/SConstruct-check-valid-options', 'SConstruct') + +# Should see "This is in SConstruct" because all options specified (none) are valid and +# so ValidatedOptions() won't exit before it's printed. +test.run() +test.must_contain_single_instance_of(test.stdout(), ["This is in SConstruct"]) + +# Should see "This is in SConstruct" because all options specified (--testing=abc) are valid and +# so ValidatedOptions() won't exit before it's printed. +test.run(arguments="--testing=abc") +test.must_contain_single_instance_of(test.stdout(), ["This is in SConstruct"]) + +# Should not see "This is in SConstruct" because the option specified (--garbage=xyz) is invalid and +# so ValidatedOptions() will exit before it's printed. +test.run(arguments="--garbage=xyz", status=2, stderr=".*SCons Error: no such option: --garbage.*", + match=TestSCons.match_re_dotall) +test.fail_test(("This is in SConstruct" in test.stdout()), + message='"This is in SConstruct" should not be output. This means ValidateOptions() did not error out before this was printed') + +# Now we'll test having ValidateOptions raise a SConsBadOptionError exception +test.run(arguments="--garbage=xyz raise=1", status=2, + stderr=".*SConsBadOptionError: no such option: no such option: --garbage.*", + match=TestSCons.match_re_dotall) +test.fail_test(("This is in SConstruct" in test.stdout()), + message='"This is in SConstruct" should not be output. This means ValidateOptions() did not error out before this was printed') + +# Now we'll test having ValidateOptions raise a SConsBadOptionError exception and catching that exception +test.run(arguments="--garbage=xyz raise=2", status=3, + stdout=".*Parser is SConsOptionParser:True.*Message is .no such option. --garbage.*", + match=TestSCons.match_re_dotall) +test.fail_test(("This is in SConstruct" in test.stdout()), + message='"This is in SConstruct" should not be output. This means ValidateOptions() did not error out before this was printed') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff -Nru scons-4.4.0+dfsg/test/Value.py scons-4.5.2+dfsg/test/Value.py --- scons-4.4.0+dfsg/test/Value.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Value.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -21,8 +23,6 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - import re import TestSCons @@ -49,6 +49,7 @@ with open(str(target[0]), 'wb') as f: f.write(source[0].get_contents()) +DefaultEnvironment(tools=[]) # test speedup env = Environment() env['BUILDERS']['B'] = Builder(action = create) env['BUILDERS']['S'] = Builder(action = r'%(_python_)s put.py $SOURCES into $TARGET') diff -Nru scons-4.4.0+dfsg/test/Variables/BoolVariable.py scons-4.5.2+dfsg/test/Variables/BoolVariable.py --- scons-4.4.0+dfsg/test/Variables/BoolVariable.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Variables/BoolVariable.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test the BoolVariable canned Variable type. @@ -40,18 +39,18 @@ assert result[1:len(expect)+1] == expect, (result[1:len(expect)+1], expect) - test.write(SConstruct_path, """\ from SCons.Variables.BoolVariable import BoolVariable + BV = BoolVariable from SCons.Variables import BoolVariable opts = Variables(args=ARGUMENTS) opts.AddVariables( - BoolVariable('warnings', 'compilation with -Wall and similiar', 1), - BV('profile', 'create profiling informations', 0), - ) + BoolVariable('warnings', 'compilation with -Wall and similiar', True), + BV('profile', 'create profiling informations', False), +) env = Environment(variables=opts) Help(opts.GenerateHelpText(env)) @@ -62,8 +61,6 @@ Default(env.Alias('dummy', None)) """) - - test.run() check([str(True), str(False)]) @@ -73,12 +70,10 @@ expect_stderr = """ scons: *** Error converting option: warnings Invalid value for boolean option: irgendwas -""" + test.python_file_line(SConstruct_path, 12) +""" + test.python_file_line(SConstruct_path, 13) test.run(arguments='warnings=irgendwas', stderr = expect_stderr, status=2) - - test.pass_test() # Local Variables: diff -Nru scons-4.4.0+dfsg/test/Variables/help.py scons-4.5.2+dfsg/test/Variables/help.py --- scons-4.4.0+dfsg/test/Variables/help.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Variables/help.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test the Variables help messages. 
@@ -46,37 +45,52 @@ test.subdir(libpath) test.write('SConstruct', """ -from SCons.Variables import BoolVariable, EnumVariable, ListVariable, \ - PackageVariable, PathVariable +from SCons.Variables import ( + BoolVariable, + EnumVariable, + ListVariable, + PackageVariable, + PathVariable, +) list_of_libs = Split('x11 gl qt ical') qtdir = r'%(qtpath)s' opts = Variables(args=ARGUMENTS) opts.AddVariables( - BoolVariable('warnings', 'compilation with -Wall and similiar', 1), - BoolVariable('profile', 'create profiling informations', 0), - EnumVariable('debug', 'debug output and symbols', 'no', - allowed_values=('yes', 'no', 'full'), - map={}, ignorecase=0), # case sensitive - EnumVariable('guilib', 'gui lib to use', 'gtk', - allowed_values=('motif', 'gtk', 'kde'), - map={}, ignorecase=1), # case insensitive - EnumVariable('some', 'some option', 'xaver', - allowed_values=('xaver', 'eins'), - map={}, ignorecase=2), # make lowercase - ListVariable('shared', - 'libraries to build as shared libraries', - 'all', - names = list_of_libs), - PackageVariable('x11', - 'use X11 installed here (yes = search some places)', - 'yes'), + BoolVariable('warnings', 'compilation with -Wall and similiar', True), + BoolVariable('profile', 'create profiling informations', False), + EnumVariable( + 'debug', + 'debug output and symbols', + 'no', + allowed_values=('yes', 'no', 'full'), + map={}, + ignorecase=0, + ), # case sensitive + EnumVariable( + 'guilib', + 'gui lib to use', + 'gtk', + allowed_values=('motif', 'gtk', 'kde'), + map={}, + ignorecase=1, + ), # case insensitive + EnumVariable( + 'some', + 'some option', + 'xaver', + allowed_values=('xaver', 'eins'), + map={}, + ignorecase=2, + ), # make lowercase + ListVariable( + 'shared', 'libraries to build as shared libraries', 'all', names=list_of_libs + ), + PackageVariable('x11', 'use X11 installed here (yes = search some places)', 'yes'), PathVariable('qtdir', 'where the root of Qt is installed', qtdir), - PathVariable('qt_libraries', - 'where the Qt library is installed', - r'%(libdirvar)s'), - ) + PathVariable('qt_libraries', 'where the Qt library is installed', r'%(libdirvar)s'), +) env = Environment(variables=opts) Help(opts.GenerateHelpText(env)) @@ -96,11 +110,11 @@ scons: done reading SConscript files. warnings: compilation with -Wall and similiar (yes|no) - default: 1 + default: True actual: %(str_True)s profile: create profiling informations (yes|no) - default: 0 + default: False actual: %(str_False)s debug: debug output and symbols (yes|no|full) diff -Nru scons-4.4.0+dfsg/test/Variables/ListVariable.py scons-4.5.2+dfsg/test/Variables/ListVariable.py --- scons-4.4.0+dfsg/test/Variables/ListVariable.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Variables/ListVariable.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test the ListVariable canned Variable type. 
@@ -62,6 +61,7 @@ LV('listvariable', 'listvariable help', 'all', names=['l1', 'l2', 'l3']) ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) opts.Save(optsfile, env) Help(opts.GenerateHelpText(env)) @@ -115,7 +115,7 @@ expect_stderr = """ scons: *** Error converting option: shared Invalid value(s) for option: foo -""" + test.python_file_line(SConstruct_path, 19) +""" + test.python_file_line(SConstruct_path, 20) test.run(arguments='shared=foo', stderr=expect_stderr, status=2) @@ -124,28 +124,28 @@ expect_stderr = """ scons: *** Error converting option: shared Invalid value(s) for option: foo -""" + test.python_file_line(SConstruct_path, 19) +""" + test.python_file_line(SConstruct_path, 20) test.run(arguments='shared=foo,ical', stderr=expect_stderr, status=2) expect_stderr = """ scons: *** Error converting option: shared Invalid value(s) for option: foo -""" + test.python_file_line(SConstruct_path, 19) +""" + test.python_file_line(SConstruct_path, 20) test.run(arguments='shared=ical,foo', stderr=expect_stderr, status=2) expect_stderr = """ scons: *** Error converting option: shared Invalid value(s) for option: foo -""" + test.python_file_line(SConstruct_path, 19) +""" + test.python_file_line(SConstruct_path, 20) test.run(arguments='shared=ical,foo,x11', stderr=expect_stderr, status=2) expect_stderr = """ scons: *** Error converting option: shared Invalid value(s) for option: foo,bar -""" + test.python_file_line(SConstruct_path, 19) +""" + test.python_file_line(SConstruct_path, 20) test.run(arguments='shared=foo,x11,,,bar', stderr=expect_stderr, status=2) @@ -162,6 +162,7 @@ names = ['ENET', 'GPIB', 'LINUX_GPIB', 'NO_GPIB']), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) Help(opts.GenerateHelpText(env)) diff -Nru scons-4.4.0+dfsg/test/Variables/PathVariable.py scons-4.5.2+dfsg/test/Variables/PathVariable.py --- scons-4.4.0+dfsg/test/Variables/PathVariable.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Variables/PathVariable.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# __COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -20,9 +22,6 @@ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# - -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test the PathVariable canned option type, with tests for its @@ -61,6 +60,7 @@ PV('qt_libraries', 'where the Qt library is installed', r'%s'), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) Help(opts.GenerateHelpText(env)) @@ -92,7 +92,7 @@ check([qtpath, libpath, libpath]) qtpath = os.path.join(workpath, 'non', 'existing', 'path') -SConstruct_file_line = test.python_file_line(test.workpath('SConstruct'), 14)[:-1] +SConstruct_file_line = test.python_file_line(test.workpath('SConstruct'), 15)[:-1] expect_stderr = """ scons: *** Path for option qtdir does not exist: %(qtpath)s @@ -130,6 +130,7 @@ PathVariable('X', 'X variable', r'%s', validator=PathVariable.PathAccept), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) print(env['X']) @@ -163,6 +164,7 @@ PathVariable('X', 'X variable', r'%s', validator=PathVariable.PathIsFile), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) print(env['X']) @@ -170,7 +172,7 @@ Default(env.Alias('dummy', None)) """ % default_file) -SConstruct_file_line = test.python_file_line(test.workpath('SConstruct'), 6)[:-1] +SConstruct_file_line = test.python_file_line(test.workpath('SConstruct'), 7)[:-1] expect_stderr = """ scons: *** File path for option X does not exist: %(default_file)s @@ -209,6 +211,7 @@ PathVariable('X', 'X variable', r'%s', validator=PathVariable.PathIsDir), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) print(env['X']) @@ -257,6 +260,7 @@ PathVariable('X', 'X variable', r'%s', validator=PathVariable.PathIsDirCreate), ) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) print(env['X']) diff -Nru scons-4.4.0+dfsg/test/Variables/Variables.py scons-4.5.2+dfsg/test/Variables/Variables.py --- scons-4.4.0+dfsg/test/Variables/Variables.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/Variables/Variables.py 2023-03-21 16:17:04.000000000 +0000 @@ -28,6 +28,7 @@ test = TestSCons.TestSCons() test.write('SConstruct', """ +DefaultEnvironment(tools=[]) # test speedup env = Environment() print(env['CC']) print(" ".join(env['CCFLAGS'])) @@ -94,6 +95,7 @@ env.Append(CCFLAGS = '-g') +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts, tools=['default', test_tool]) Help('Variables settable in custom.py or on the command line:\\n' + opts.GenerateHelpText(env)) @@ -217,6 +219,7 @@ opts.Add('UNSPECIFIED', 'An option with no value') +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables = opts) print(env['RELEASE_BUILD']) @@ -275,6 +278,7 @@ 'none', names = ['a','b','c',]) +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables = opts) print(env['RELEASE_BUILD']) @@ -319,6 +323,7 @@ opts.Add('UNSPECIFIED', 'An option with no value') +DefaultEnvironment(tools=[]) # test speedup env = Environment(variables=opts) def compare(a, b): @@ -355,6 +360,7 @@ test.write('SConstruct', """ import SCons.Variables +DefaultEnvironment(tools=[]) # test speedup env1 = Environment(variables = Variables()) env2 = Environment(variables = SCons.Variables.Variables()) """) diff -Nru scons-4.4.0+dfsg/test/VariantDir/VariantDir.py scons-4.5.2+dfsg/test/VariantDir/VariantDir.py --- scons-4.4.0+dfsg/test/VariantDir/VariantDir.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/test/VariantDir/VariantDir.py 2023-03-21 16:17:04.000000000 +0000 @@ -1,6 +1,8 @@ #!/usr/bin/env python # -# 
__COPYRIGHT__ +# MIT License +# +# Copyright The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -21,8 +23,6 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" - import TestSCons _exe = TestSCons._exe @@ -56,7 +56,6 @@ test.subdir('work1', 'work2', 'work3') test.write(['work1', 'SConstruct'], """ -DefaultEnvironment(tools=[]) src = Dir('src') var2 = Dir('build/var2') var3 = Dir('build/var3') @@ -64,6 +63,7 @@ var5 = Dir('../build/var5') var6 = Dir('../build/var6') +DefaultEnvironment(tools=[]) # test speedup env = Environment(BUILD = 'build', SRC = 'src') VariantDir('build/var1', src) @@ -119,7 +119,7 @@ if fortran and env.Detect(fortran): if sys.platform =='win32': - env_prog=Environment(tools=['mingw'], + env_prog = Environment(tools=['mingw'], # BUILD = env['BUILD'], SRC = ENV['src'], CPPPATH=env['CPPPATH'], FORTRANPATH=env['FORTRANPATH'] ) else: @@ -333,6 +333,7 @@ # test.write(['work2', 'SConstruct'], """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment() env.Program('prog.c') """) @@ -356,6 +357,7 @@ # test.write(['work2', 'SConstruct'], """\ +DefaultEnvironment(tools=[]) # test speedup env = Environment() VariantDir('build', '.') Export('env') diff -Nru scons-4.4.0+dfsg/testing/framework/TestCmd.py scons-4.5.2+dfsg/testing/framework/TestCmd.py --- scons-4.4.0+dfsg/testing/framework/TestCmd.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestCmd.py 2023-03-21 16:17:04.000000000 +0000 @@ -302,7 +302,12 @@ import hashlib import os import re -import psutil +try: + import psutil +except ImportError: + HAVE_PSUTIL = False +else: + HAVE_PSUTIL = True import shutil import signal import stat @@ -364,7 +369,7 @@ testprefix = 'testcmd.' if os.name in ('posix', 'nt'): - testprefix += "%s." % str(os.getpid()) + testprefix += f"{os.getpid()}." re_space = re.compile(r'\s') @@ -381,41 +386,45 @@ if name in ("?", ""): name = "" else: - name = " (" + name + ")" + name = f" ({name})" string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name)) atfrom = "\tfrom" return string -def clean_up_ninja_daemon(self, result_type): +def clean_up_ninja_daemon(self, result_type) -> None: """ - Kill any running scons daemon started by ninja and clean up it's working dir and - temp files. + Kill any running scons daemon started by ninja and clean up + + Working directory and temp files are removed. + Skipped if this platform doesn't have psutil (e.g. 
msys2 on Windows) """ - if self: - for path in Path(self.workdir).rglob('.ninja'): - daemon_dir = Path(tempfile.gettempdir()) / ( - "scons_daemon_" + str(hashlib.md5(str(path.resolve()).encode()).hexdigest()) - ) - pidfiles = [daemon_dir / 'pidfile', path / 'scons_daemon_dirty'] - for pidfile in pidfiles: - if pidfile.exists(): - with open(pidfile) as f: - try: - pid = int(f.read()) - os.kill(pid, signal.SIGINT) - except OSError: - pass - - while True: - if pid not in [proc.pid for proc in psutil.process_iter()]: - break - else: - time.sleep(0.1) - - if not self._preserve[result_type]: - if daemon_dir.exists(): - shutil.rmtree(daemon_dir) + if not self: + return + + for path in Path(self.workdir).rglob('.ninja'): + daemon_dir = Path(tempfile.gettempdir()) / ( + f"scons_daemon_{str(hashlib.md5(str(path.resolve()).encode()).hexdigest())}" + ) + pidfiles = [daemon_dir / 'pidfile', path / 'scons_daemon_dirty'] + for pidfile in pidfiles: + if pidfile.exists(): + with open(pidfile) as f: + try: + pid = int(f.read()) + os.kill(pid, signal.SIGINT) + except OSError: + pass + + while HAVE_PSUTIL: + if pid not in [proc.pid for proc in psutil.process_iter()]: + break + else: + time.sleep(0.1) + + if not self._preserve[result_type]: + if daemon_dir.exists(): + shutil.rmtree(daemon_dir) def fail_test(self=None, condition=True, function=None, skip=0, message=None): @@ -443,18 +452,18 @@ sep = " " if self is not None: if self.program: - of = " of " + self.program + of = f" of {self.program}" sep = "\n\t" if self.description: - desc = " [" + self.description + "]" + desc = f" [{self.description}]" sep = "\n\t" at = _caller(traceback.extract_stack(), skip) if message: - msg = "\t%s\n" % message + msg = f"\t{message}\n" else: msg = "" - sys.stderr.write("FAILED test" + of + desc + sep + at + msg) + sys.stderr.write(f"FAILED test{of}{desc}{sep}{at}{msg}") sys.exit(1) @@ -489,14 +498,14 @@ sep = " " if self is not None: if self.program: - of = " of " + self.program + of = f" of {self.program}" sep = "\n\t" if self.description: - desc = " [" + self.description + "]" + desc = f" [{self.description}]" sep = "\n\t" at = _caller(traceback.extract_stack(), skip) - sys.stderr.write("NO RESULT for test" + of + desc + sep + at) + sys.stderr.write(f"NO RESULT for test{of}{desc}{sep}{at}") sys.exit(2) @@ -593,7 +602,7 @@ if not is_List(res): res = res.split("\n") if len(lines) != len(res): - print("match_re: expected %d lines, found %d" % (len(res), len(lines))) + print(f"match_re: expected {len(res)} lines, found {len(lines)}") return None for i, (line, regex) in enumerate(zip(lines, res)): s = r"^{}$".format(regex) @@ -665,24 +674,24 @@ sm = difflib.SequenceMatcher(None, a, b) def comma(x1, x2): - return x1 + 1 == x2 and str(x2) or '%s,%s' % (x1 + 1, x2) + return x1 + 1 == x2 and str(x2) or f'{x1 + 1},{x2}' for op, a1, a2, b1, b2 in sm.get_opcodes(): if op == 'delete': - yield "{}d{}{}".format(comma(a1, a2), b1, lineterm) + yield f"{comma(a1, a2)}d{b1}{lineterm}" for l in a[a1:a2]: - yield '< ' + l + yield f"< {l}" elif op == 'insert': - yield "{}a{}{}".format(a1, comma(b1, b2), lineterm) + yield f"{a1}a{comma(b1, b2)}{lineterm}" for l in b[b1:b2]: - yield '> ' + l + yield f"> {l}" elif op == 'replace': - yield "{}c{}{}".format(comma(a1, a2), comma(b1, b2), lineterm) + yield f"{comma(a1, a2)}c{comma(b1, b2)}{lineterm}" for l in a[a1:a2]: - yield '< ' + l - yield '---{}'.format(lineterm) + yield f"< {l}" + yield f'---{lineterm}' for l in b[b1:b2]: - yield '> ' + l + yield f"> {l}" def diff_re(a, b, fromfile='', 
tofile='', @@ -712,10 +721,10 @@ msg = "Regular expression error in %s: %s" raise re.error(msg % (repr(s), e.args[0])) if not expr.search(bline): - result.append("%sc%s" % (i + 1, i + 1)) - result.append('< ' + repr(a[i])) + result.append(f"{i + 1}c{i + 1}") + result.append(f"< {repr(a[i])}") result.append('---') - result.append('> ' + repr(b[i])) + result.append(f"> {repr(b[i])}") return result @@ -728,7 +737,7 @@ for c in special: arg = arg.replace(c, slash + c) if re_space.search(arg): - arg = '"' + arg + '"' + arg = f"\"{arg}\"" return arg else: # Windows does not allow special characters in file names @@ -736,7 +745,7 @@ # the arg. def escape(arg): if re_space.search(arg): - arg = '"' + arg + '"' + arg = f"\"{arg}\"" return arg if os.name == 'java': @@ -1005,8 +1014,7 @@ class TestCmd: - """Class TestCmd - """ + """Class TestCmd""" def __init__( self, @@ -1040,7 +1048,10 @@ self.combine = combine self.universal_newlines = universal_newlines self.process = None - self.set_timeout(timeout) + # Two layers of timeout: one at the test class instance level, + # one set on an individual start() call (usually via a run() call) + self.timeout = timeout + self.start_timeout = None self.set_match_function(match, match_stdout, match_stderr) self.set_diff_function(diff, diff_stdout, diff_stderr) self._dirlist = [] @@ -1080,7 +1091,7 @@ self.cleanup() def __repr__(self): - return "%x" % id(self) + return f"{id(self):x}" banner_char = '=' banner_width = 80 @@ -1088,7 +1099,7 @@ def banner(self, s, width=None): if width is None: width = self.banner_width - return s + self.banner_char * (width - len(s)) + return f"{s:{self.banner_char}<{width}}" escape = staticmethod(escape) @@ -1128,7 +1139,7 @@ condition = self.condition if self._preserve[condition]: for dir in self._dirlist: - print("Preserved directory " + dir) + print(f"Preserved directory {dir}") else: list = self._dirlist[:] list.reverse() @@ -1164,7 +1175,7 @@ cmd = list(interpreter) + cmd if arguments: if isinstance(arguments, dict): - cmd.extend(["%s=%s" % (k, v) for k, v in arguments.items()]) + cmd.extend([f"{k}={v}" for k, v in arguments.items()]) return cmd if isinstance(arguments, str): arguments = arguments.split() @@ -1365,14 +1376,6 @@ dir = self.canonicalize(dir) os.rmdir(dir) - def _timeout(self): - self.process.terminate() - self.timer.cancel() - self.timer = None - - def set_timeout(self, timeout): - self.timeout = timeout - self.timer = None def parse_path(self, path, suppress_current=False): """Return a list with the single path components of path.""" @@ -1499,7 +1502,7 @@ interpreter=None, arguments=None, universal_newlines=None, - timeout=_Null, + timeout=None, **kw): """ Starts a program or script for the test environment. 
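The TestCmd changes above drop the threading.Timer based set_timeout()/_timeout() machinery in favour of two plain attributes: self.timeout as the per-instance default and self.start_timeout as a one-shot override recorded by start() and consumed by the next finish()/run(), which hand it to Popen.communicate(). A reduced sketch of that shape, using only the standard library and an invented ToyRunner class (illustrative only, not the patch's actual code), could look like this:

import subprocess
import sys

class ToyRunner:
    """Instance-level default timeout plus a one-shot per-start override."""

    def __init__(self, timeout=None):
        self.timeout = timeout      # default applied to every run
        self.start_timeout = None   # set by start(), consumed once by finish()
        self.process = None

    def start(self, argv, timeout=None):
        if timeout:
            self.start_timeout = timeout
        self.process = subprocess.Popen(
            argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        return self.process

    def finish(self):
        # the one-shot override wins, then fall back to the instance default
        timeout = self.start_timeout or self.timeout
        self.start_timeout = None
        try:
            stdout, stderr = self.process.communicate(timeout=timeout)
        except subprocess.TimeoutExpired:
            self.process.terminate()
            stdout, stderr = self.process.communicate()
        return self.process.returncode, stdout, stderr


runner = ToyRunner(timeout=10)
runner.start([sys.executable, "-c", "print('ok')"], timeout=2)
print(runner.finish())

The real finish()/run() in the patch additionally close the child's stdin/stdout/stderr handles afterwards instead of using Popen as a context manager; that detail is left out of the sketch.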
@@ -1527,11 +1530,8 @@ else: stderr_value = PIPE - if timeout is _Null: - timeout = self.timeout if timeout: - self.timer = threading.Timer(float(timeout), self._timeout) - self.timer.start() + self.start_timeout = timeout if sys.platform == 'win32': # Set this otherwist stdout/stderr pipes default to @@ -1583,14 +1583,32 @@ """ if popen is None: popen = self.process - stdout, stderr = popen.communicate() + if self.start_timeout: + timeout = self.start_timeout + # we're using a timeout from start, now reset it to default + self.start_timeout = None + else: + timeout = self.timeout + try: + stdout, stderr = popen.communicate(timeout=timeout) + except subprocess.TimeoutExpired: + popen.terminate() + stdout, stderr = popen.communicate() + + # this is instead of using Popen as a context manager: + if popen.stdout: + popen.stdout.close() + if popen.stderr: + popen.stderr.close() + try: + if popen.stdin: + popen.stdin.close() + finally: + popen.wait() stdout = self.fix_binary_stream(stdout) stderr = self.fix_binary_stream(stderr) - if self.timer: - self.timer.cancel() - self.timer = None self.status = popen.returncode self.process = None self._stdout.append(stdout or '') @@ -1602,7 +1620,7 @@ chdir=None, stdin=None, universal_newlines=None, - timeout=_Null): + timeout=None): """Runs a test of the program or script for the test environment. Output and error output are saved for future retrieval via @@ -1628,8 +1646,10 @@ if not os.path.isabs(chdir): chdir = os.path.join(self.workpath(chdir)) if self.verbose: - sys.stderr.write("chdir(" + chdir + ")\n") + sys.stderr.write(f"chdir({chdir})\n") os.chdir(chdir) + if not timeout: + timeout = self.timeout p = self.start(program=program, interpreter=interpreter, arguments=arguments, @@ -1647,17 +1667,29 @@ # subclasses that redefine .finish(). We could abstract this # into Yet Another common method called both here and by .finish(), # but that seems ill-thought-out. - stdout, stderr = p.communicate(input=stdin) - if self.timer: - self.timer.cancel() - self.timer = None + try: + stdout, stderr = p.communicate(input=stdin, timeout=timeout) + except subprocess.TimeoutExpired: + p.terminate() + stdout, stderr = p.communicate() + + # this is instead of using Popen as a context manager: + if p.stdout: + p.stdout.close() + if p.stderr: + p.stderr.close() + try: + if p.stdin: + p.stdin.close() + finally: + p.wait() + self.status = p.returncode self.process = None stdout = self.fix_binary_stream(stdout) stderr = self.fix_binary_stream(stderr) - self._stdout.append(stdout or '') self._stderr.append(stderr or '') @@ -1668,12 +1700,12 @@ write('============ STATUS: %d\n' % self.status) out = self.stdout() if out or self.verbose >= 3: - write('============ BEGIN STDOUT (len=%d):\n' % len(out)) + write(f'============ BEGIN STDOUT (len={len(out)}):\n') write(out) write('============ END STDOUT\n') err = self.stderr() if err or self.verbose >= 3: - write('============ BEGIN STDERR (len=%d)\n' % len(err)) + write(f'============ BEGIN STDERR (len={len(err)})\n') write(err) write('============ END STDERR\n') @@ -1687,12 +1719,16 @@ time.sleep(seconds) def stderr(self, run=None) -> Optional[str]: - """Returns the error output from the specified run number. + """Returns the stored standard error output from a given run. - If there is no specified run number, then returns the error - output of the last run. If the run number is less than zero, - then returns the error output from that many runs back from the - current run. + Args: + run: run number to select. 
If run number is omitted, + return the standard error of the most recent run. + If negative, use as a relative offset, e.g. -2 + means the run two prior to the most recent. + + Returns: + selected sterr string or None if there are no stored runs. """ if not run: run = len(self._stderr) @@ -1709,13 +1745,12 @@ Args: run: run number to select. If run number is omitted, - return the standard output of the most recent run. - If negative, use as a relative offset, so that -2 - means the run two prior to the most recent. + return the standard output of the most recent run. + If negative, use as a relative offset, e.g. -2 + means the run two prior to the most recent. Returns: - selected stdout string or None if there are no - stored runs. + selected stdout string or None if there are no stored runs. """ if not run: run = len(self._stdout) diff -Nru scons-4.4.0+dfsg/testing/framework/TestCmdTests.py scons-4.5.2+dfsg/testing/framework/TestCmdTests.py --- scons-4.4.0+dfsg/testing/framework/TestCmdTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestCmdTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -19,8 +19,6 @@ # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. -__author__ = "Steven Knight " -__revision__ = "TestCmdTests.py 1.3.D001 2010/06/03 12:58:27 knight" import os import shutil @@ -42,6 +40,7 @@ sys.path = sys.path[1:] import TestCmd +from TestCmd import _python_ def _is_readable(path): # XXX this doesn't take into account UID, it assumes it's our file @@ -97,10 +96,10 @@ textx = fmt % (t.scriptx, t.scriptx) if sys.platform == 'win32': textx = textx.replace('%', '%%') - textx = '@python -c "%s"' % textx + ' %1 %2 %3 %4 %5 %6 %7 %8 %9\n' + textx = f"@{_python_} -c \"{textx}\" %1 %2 %3 %4 %5 %6 %7 %8 %9\n" else: - textx = '#! 
/usr/bin/env python\n' + textx + '\n' - text1 = 'A first line to be ignored!\n' + fmt % (t.script1, t.script1) + textx = f"#!{_python_}\n{textx}\n" + text1 = f"A first line to be ignored!\n{fmt % (t.script1, t.script1)}" textout = fmtout % t.scriptout texterr = fmterr % t.scripterr @@ -151,33 +150,27 @@ python = sys.executable _stdout, _stderr, _status = self.call_python(indata, python) assert _status == status, ( - "status = %s, expected %s\n" % (str(_status), str(status)) - + "STDOUT ===================\n" - + _stdout - + "STDERR ===================\n" - + _stderr + f"status = {_status}, expected {status}\n" + f"STDOUT ===================\n{_stdout}" + f"STDERR ===================\n{_stderr}" ) assert _stdout == stdout, ( - "Expected STDOUT ==========\n" - + stdout - + "Actual STDOUT ============\n" - + _stdout - + "STDERR ===================\n" - + _stderr + f"Expected STDOUT ==========\n{stdout}" + f"Actual STDOUT ============\n{_stdout}" + f"STDERR ===================\n{_stderr}" ) assert _stderr == stderr, ( - "Expected STDERR ==========\n" - + stderr - + "Actual STDERR ============\n" - + _stderr + f"Expected STDERR ==========\n{stderr}" + f"Actual STDERR ============\n{_stderr}" ) def run_match(self, content, *args): expect = "%s: %s: %s: %s\n" % args content = self.translate_newlines(to_str(content)) - assert content == expect, \ - "Expected %s ==========\n" % args[1] + expect + \ - "Actual %s ============\n" % args[1] + content + assert content == expect, ( + f"Expected {args[1] + expect} ==========\n" + f"Actual {args[1] + content} ============\n" + ) @@ -238,12 +231,12 @@ def test_atexit(self): """Test cleanup when atexit is used""" - self.popen_python("""\ + self.popen_python(f"""\ import atexit import sys import TestCmd -sys.path = [r'%s'] + sys.path +sys.path = [r'{self.orig_cwd}'] + sys.path @atexit.register def cleanup(): @@ -251,7 +244,7 @@ result = TestCmd.TestCmd(workdir='') sys.exit(0) -""" % self.orig_cwd, stdout='cleanup()\n') +""", stdout='cleanup()\n') class chmod_TestCase(TestCmdTestCase): @@ -273,17 +266,17 @@ test.chmod(['sub', 'file2'], stat.S_IWRITE) file1_mode = stat.S_IMODE(os.stat(wdir_file1)[stat.ST_MODE]) - assert file1_mode == 0o444, '0%o' % file1_mode + assert file1_mode == 0o444, f'0{file1_mode:o}' file2_mode = stat.S_IMODE(os.stat(wdir_sub_file2)[stat.ST_MODE]) - assert file2_mode == 0o666, '0%o' % file2_mode + assert file2_mode == 0o666, f'0{file2_mode:o}' test.chmod('file1', stat.S_IWRITE) test.chmod(wdir_sub_file2, stat.S_IREAD) file1_mode = stat.S_IMODE(os.stat(wdir_file1)[stat.ST_MODE]) - assert file1_mode == 0o666, '0%o' % file1_mode + assert file1_mode == 0o666, f'0{file1_mode:o}' file2_mode = stat.S_IMODE(os.stat(wdir_sub_file2)[stat.ST_MODE]) - assert file2_mode == 0o444, '0%o' % file2_mode + assert file2_mode == 0o444, f'0{file2_mode:o}' else: @@ -291,17 +284,17 @@ test.chmod(['sub', 'file2'], 0o760) file1_mode = stat.S_IMODE(os.stat(wdir_file1)[stat.ST_MODE]) - assert file1_mode == 0o700, '0%o' % file1_mode + assert file1_mode == 0o700, f'0{file1_mode:o}' file2_mode = stat.S_IMODE(os.stat(wdir_sub_file2)[stat.ST_MODE]) - assert file2_mode == 0o760, '0%o' % file2_mode + assert file2_mode == 0o760, f'0{file2_mode:o}' test.chmod('file1', 0o765) test.chmod(wdir_sub_file2, 0o567) file1_mode = stat.S_IMODE(os.stat(wdir_file1)[stat.ST_MODE]) - assert file1_mode == 0o765, '0%o' % file1_mode + assert file1_mode == 0o765, f'0{file1_mode:o}' file2_mode = stat.S_IMODE(os.stat(wdir_sub_file2)[stat.ST_MODE]) - assert file2_mode == 0o567, '0%o' % 
file2_mode + assert file2_mode == 0o567, f'0{file2_mode:o}' @@ -335,7 +328,7 @@ combine = 1) output = test.stdout() if output is not None: - raise IndexError("got unexpected output:\n\t`%s'\n" % output) + raise IndexError(f"got unexpected output:\n\t`{output}'\n") # The underlying system subprocess implementations can combine # stdout and stderr in different orders, so we accomodate both. @@ -414,8 +407,8 @@ def test_diff_custom_function(self): """Test diff() using a custom function""" - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def my_diff(a, b): return [ @@ -428,7 +421,7 @@ test = TestCmd.TestCmd(diff = my_diff) test.diff("a\\nb1\\nc\\n", "a\\nb2\\nc\\n", "STDOUT") sys.exit(0) -""" % self.orig_cwd, +""", stdout = """\ STDOUT========================================================================== ***** @@ -439,13 +432,13 @@ """) def test_diff_string(self): - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff = 'diff_re') test.diff("a\\nb1\\nc\\n", "a\\nb2\\nc\\n", 'STDOUT') sys.exit(0) -""" % self.orig_cwd, +""", stdout = """\ STDOUT========================================================================== 2c2 @@ -456,12 +449,12 @@ def test_error(self): """Test handling a compilation error in TestCmd.diff_re()""" - script_input = """import sys -sys.path = [r'%s'] + sys.path + script_input = f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd assert TestCmd.diff_re([r"a.*(e"], ["abcde"]) sys.exit(0) -""" % self.orig_cwd +""" stdout, stderr, status = self.call_python(script_input) assert status == 1, status expect1 = "Regular expression error in '^a.*(e$': missing )" @@ -471,8 +464,8 @@ def test_simple_diff_static_method(self): """Test calling the TestCmd.TestCmd.simple_diff() static method""" - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd result = TestCmd.TestCmd.simple_diff(['a', 'b', 'c', 'e', 'f1'], ['a', 'c', 'd', 'e', 'f2']) @@ -480,12 +473,12 @@ expect = ['2d1', '< b', '3a3', '> d', '5c5', '< f1', '---', '> f2'] assert result == expect, result sys.exit(0) -""" % self.orig_cwd) +""") def test_context_diff_static_method(self): """Test calling the TestCmd.TestCmd.context_diff() static method""" - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd result = TestCmd.TestCmd.context_diff(['a\\n', 'b\\n', 'c\\n', 'e\\n', 'f1\\n'], ['a\\n', 'c\\n', 'd\\n', 'e\\n', 'f2\\n']) @@ -509,12 +502,12 @@ ] assert result == expect, result sys.exit(0) -""" % self.orig_cwd) +""") def test_unified_diff_static_method(self): """Test calling the TestCmd.TestCmd.unified_diff() static method""" - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd result = TestCmd.TestCmd.unified_diff(['a\\n', 'b\\n', 'c\\n', 'e\\n', 'f1\\n'], ['a\\n', 'c\\n', 'd\\n', 'e\\n', 'f2\\n']) @@ -533,12 +526,12 @@ ] assert result == expect, result sys.exit(0) -""" % self.orig_cwd) +""") def test_diff_re_static_method(self): """Test calling the TestCmd.TestCmd.diff_re() static method""" - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path 
+ self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd result = TestCmd.TestCmd.diff_re(['a', 'b', 'c', '.', 'f1'], ['a', 'c', 'd', 'e', 'f2']) @@ -559,20 +552,20 @@ ] assert result == expect, result sys.exit(0) -""" % self.orig_cwd) +""") class diff_stderr_TestCase(TestCmdTestCase): def test_diff_stderr_default(self): """Test diff_stderr() default behavior""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd() test.diff_stderr('a\nb1\nc\n', 'a\nb2\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 2c2 < b1 @@ -582,9 +575,9 @@ def test_diff_stderr_not_affecting_diff_stdout(self): """Test diff_stderr() not affecting diff_stdout() behavior""" - self.popen_python(r""" + self.popen_python(fr""" import sys -sys.path = [r'%s'] + sys.path +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stderr='diff_re') print("diff_stderr:") @@ -592,7 +585,7 @@ print("diff_stdout:") test.diff_stdout('a\nb.\nc\n', 'a\nbb\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ diff_stderr: diff_stdout: @@ -604,15 +597,15 @@ def test_diff_stderr_custom_function(self): """Test diff_stderr() using a custom function""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def my_diff(a, b): return ["a:"] + a + ["b:"] + b test = TestCmd.TestCmd(diff_stderr=my_diff) test.diff_stderr('abc', 'def') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ a: abc @@ -622,13 +615,13 @@ def test_diff_stderr_TestCmd_function(self): """Test diff_stderr() using a TestCmd function""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stderr = TestCmd.diff_re) test.diff_stderr('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -638,13 +631,13 @@ def test_diff_stderr_static_method(self): """Test diff_stderr() using a static method""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stderr=TestCmd.TestCmd.diff_re) test.diff_stderr('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -654,13 +647,13 @@ def test_diff_stderr_string(self): """Test diff_stderr() using a string to fetch the diff method""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stderr='diff_re') test.diff_stderr('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -673,13 +666,13 @@ class diff_stdout_TestCase(TestCmdTestCase): def test_diff_stdout_default(self): """Test diff_stdout() default behavior""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd() test.diff_stdout('a\nb1\nc\n', 'a\nb2\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 2c2 < b1 @@ -689,9 +682,9 @@ def test_diff_stdout_not_affecting_diff_stderr(self): """Test diff_stdout() not affecting diff_stderr() behavior""" - 
self.popen_python(r""" + self.popen_python(fr""" import sys -sys.path = [r'%s'] + sys.path +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stdout='diff_re') print("diff_stdout:") @@ -699,7 +692,7 @@ print("diff_stderr:") test.diff_stderr('a\nb.\nc\n', 'a\nbb\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ diff_stdout: diff_stderr: @@ -711,15 +704,15 @@ def test_diff_stdout_custom_function(self): """Test diff_stdout() using a custom function""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def my_diff(a, b): return ["a:"] + a + ["b:"] + b test = TestCmd.TestCmd(diff_stdout=my_diff) test.diff_stdout('abc', 'def') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ a: abc @@ -729,13 +722,13 @@ def test_diff_stdout_TestCmd_function(self): """Test diff_stdout() using a TestCmd function""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stdout = TestCmd.diff_re) test.diff_stdout('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -745,13 +738,13 @@ def test_diff_stdout_static_method(self): """Test diff_stdout() using a static method""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stdout=TestCmd.TestCmd.diff_re) test.diff_stdout('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -761,13 +754,13 @@ def test_diff_stdout_string(self): """Test diff_stdout() using a string to fetch the diff method""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(diff_stdout='diff_re') test.diff_stdout('a\n.\n', 'b\nc\n') sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ 1c1 < 'a' @@ -787,12 +780,12 @@ 'fail_test': "FAILED test at line 5 of \n", 'no_result': "NO RESULT for test at line 5 of \n"} global ExitError - input = """import sys -sys.path = [r'%s'] + sys.path + input = f"""import sys +sys.path = [r'{cwd}'] + sys.path import TestCmd -test = TestCmd.TestCmd(workdir = '%s') -test.%s() -""" % (cwd, tempdir, condition) +test = TestCmd.TestCmd(workdir = '{tempdir}') +test.{condition}() +""" stdout, stderr, status = self.call_python(input, python="python") if close_true[condition]: unexpected = (status != 0) @@ -805,7 +798,7 @@ msg = "Expected exit status %d, got %d\n" raise ExitError(msg % (exit_status[condition], status)) if stderr != result_string[condition]: - msg = "Expected error output:\n%sGot error output:\n%s" + msg = "Expected error output:\n%s\nGot error output:\n%s" raise ExitError(msg % (result_string[condition], stderr)) if preserved: if not os.path.exists(tempdir): @@ -862,40 +855,40 @@ os.chdir(run_env.workdir) # Everything before this prepared our "source directory." # Now do the real test. 
- self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd TestCmd.fail_test(condition = 1) -""" % self.orig_cwd, status = 1, stderr = "FAILED test at line 4 of \n") +""", status = 1, stderr = "FAILED test at line 4 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() test.fail_test(condition = (test.status == 0)) -""" % self.orig_cwd, status = 1, stderr = "FAILED test of %s\n\tat line 6 of \n" % run_env.workpath('run')) +""", status = 1, stderr = f"FAILED test of {run_env.workpath('run')}\n\tat line 6 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', description = 'xyzzy', workdir = '') test.run() test.fail_test(condition = (test.status == 0)) -""" % self.orig_cwd, status = 1, stderr = "FAILED test of %s [xyzzy]\n\tat line 6 of \n" % run_env.workpath('run')) +""", status = 1, stderr = f"FAILED test of {run_env.workpath('run')} [xyzzy]\n\tat line 6 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() def xxx(): sys.stderr.write("printed on failure\\n") test.fail_test(condition = (test.status == 0), function = xxx) -""" % self.orig_cwd, status = 1, stderr = "printed on failure\nFAILED test of %s\n\tat line 8 of \n" % run_env.workpath('run')) +""", status = 1, stderr = f"printed on failure\nFAILED test of {run_env.workpath('run')}\n\tat line 8 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def test1(self): self.run() @@ -903,10 +896,10 @@ def test2(self): test1(self) test2(TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '')) -""" % self.orig_cwd, status = 1, stderr = "FAILED test of %s\n\tat line 6 of (test1)\n\tfrom line 8 of (test2)\n\tfrom line 9 of \n" % run_env.workpath('run')) +""", status = 1, stderr = f"FAILED test of {run_env.workpath('run')}\n\tat line 6 of (test1)\n\tfrom line 8 of (test2)\n\tfrom line 9 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def test1(self): self.run() @@ -914,7 +907,7 @@ def test2(self): test1(self) test2(TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '')) -""" % self.orig_cwd, status = 1, stderr = "FAILED test of %s\n\tat line 8 of (test2)\n\tfrom line 9 of \n" % run_env.workpath('run')) +""", status = 1, stderr = f"FAILED test of {run_env.workpath('run')}\n\tat line 8 of (test2)\n\tfrom line 9 of \n") @@ -1062,12 +1055,12 @@ # Everything before this prepared our "source directory." # Now do the real test. 
try: - script_input = """import sys -sys.path = [r'%s'] + sys.path + script_input = f"""import sys +sys.path = [r'{cwd}'] + sys.path import TestCmd assert TestCmd.match_re_dotall("abcde", r"a.*(e") sys.exit(0) -""" % cwd +""" stdout, stderr, status = self.call_python(script_input) assert status == 1, status expect1 = "Regular expression error in '^a.*(e$': missing )" @@ -1135,12 +1128,14 @@ # Everything before this prepared our "source directory." # Now do the real test. try: - script_input = """import sys -sys.path = [r'%s'] + sys.path + script_input = f"""import sys +sys.path = [r'{cwd}'] + sys.path import TestCmd -assert TestCmd.match_re("abcde\\n", "a.*(e\\n") +assert TestCmd.match_re("abcde\ +", "a.*(e\ +") sys.exit(0) -""" % cwd +""" stdout, stderr, status = self.call_python(script_input) assert status == 1, status expect1 = "Regular expression error in '^a.*(e$': missing )" @@ -1345,40 +1340,40 @@ os.chdir(run_env.workdir) # Everything before this prepared our "source directory." # Now do the real test. - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd TestCmd.no_result(condition = 1) -""" % self.orig_cwd, status = 2, stderr = "NO RESULT for test at line 4 of \n") +""", status = 2, stderr = "NO RESULT for test at line 4 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() test.no_result(condition = (test.status == 0)) -""" % self.orig_cwd, status = 2, stderr = "NO RESULT for test of %s\n\tat line 6 of \n" % run_env.workpath('run')) +""", status = 2, stderr = f"NO RESULT for test of {run_env.workpath('run')}\n\tat line 6 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', description = 'xyzzy', workdir = '') test.run() test.no_result(condition = (test.status == 0)) -""" % self.orig_cwd, status = 2, stderr = "NO RESULT for test of %s [xyzzy]\n\tat line 6 of \n" % run_env.workpath('run')) +""", status = 2, stderr = f"NO RESULT for test of {run_env.workpath('run')} [xyzzy]\n\tat line 6 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() def xxx(): sys.stderr.write("printed on no result\\n") test.no_result(condition = (test.status == 0), function = xxx) -""" % self.orig_cwd, status = 2, stderr = "printed on no result\nNO RESULT for test of %s\n\tat line 8 of \n" % run_env.workpath('run')) +""", status = 2, stderr = f"printed on no result\nNO RESULT for test of {run_env.workpath('run')}\n\tat line 8 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def test1(self): self.run() @@ -1386,10 +1381,10 @@ def test2(self): test1(self) test2(TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '')) -""" % self.orig_cwd, status = 2, stderr = "NO RESULT for test of %s\n\tat line 6 of (test1)\n\tfrom line 8 of (test2)\n\tfrom line 9 of \n" % run_env.workpath('run')) +""", status = 2, 
stderr = f"NO RESULT for test of {run_env.workpath('run')}\n\tat line 6 of (test1)\n\tfrom line 8 of (test2)\n\tfrom line 9 of \n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd def test1(self): self.run() @@ -1397,7 +1392,7 @@ def test2(self): test1(self) test2(TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '')) -""" % self.orig_cwd, status = 2, stderr = "NO RESULT for test of %s\n\tat line 8 of (test2)\n\tfrom line 9 of \n" % run_env.workpath('run')) +""", status = 2, stderr = f"NO RESULT for test of {run_env.workpath('run')}\n\tat line 8 of (test2)\n\tfrom line 9 of \n") @@ -1412,29 +1407,29 @@ os.chdir(run_env.workdir) # Everything before this prepared our "source directory." # Now do the real test. - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd TestCmd.pass_test(condition = 1) -""" % self.orig_cwd, stderr = "PASSED\n") +""", stderr = "PASSED\n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() test.pass_test(condition = (test.status == 0)) -""" % self.orig_cwd, stderr = "PASSED\n") +""", stderr = "PASSED\n") - self.popen_python("""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd(program = 'run', interpreter = 'python', workdir = '') test.run() def brag(): sys.stderr.write("printed on success\\n") test.pass_test(condition = (test.status == 0), function = brag) -""" % self.orig_cwd, stderr = "printed on success\nPASSED\n") +""", stderr = "printed on success\nPASSED\n") # TODO(sgk): SHOULD ALSO TEST FAILURE CONDITIONS @@ -1453,7 +1448,7 @@ else: test.cleanup() o = io.getvalue() - assert o == stdout, "o = `%s', stdout = `%s'" % (o, stdout) + assert o == stdout, f"o = `{o}', stdout = `{stdout}'" finally: sys.stdout = save @@ -1474,7 +1469,7 @@ try: test.write('file2', "Test file #2\n") test.preserve('pass_test') - cleanup_test(test, 'pass_test', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'pass_test', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) cleanup_test(test, 'fail_test') assert not os.path.exists(wdir) @@ -1488,7 +1483,7 @@ try: test.write('file3', "Test file #3\n") test.preserve('fail_test') - cleanup_test(test, 'fail_test', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'fail_test', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) cleanup_test(test, 'pass_test') assert not os.path.exists(wdir) @@ -1502,9 +1497,9 @@ try: test.write('file4', "Test file #4\n") test.preserve('fail_test', 'no_result') - cleanup_test(test, 'fail_test', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'fail_test', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) - cleanup_test(test, 'no_result', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'no_result', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) cleanup_test(test, 'pass_test') assert not os.path.exists(wdir) @@ -1517,11 +1512,11 @@ wdir = test.workdir try: test.preserve() - cleanup_test(test, 'pass_test', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'pass_test', f"Preserved directory {wdir}\n") assert 
os.path.isdir(wdir) - cleanup_test(test, 'fail_test', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'fail_test', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) - cleanup_test(test, 'no_result', "Preserved directory %s\n" % wdir) + cleanup_test(test, 'no_result', f"Preserved directory {wdir}\n") assert os.path.isdir(wdir) finally: if os.path.exists(wdir): @@ -1626,7 +1621,7 @@ else: raise Exception("did not catch expected SConsEnvironmentError") - assert os.path.isdir(s_d_o), "%s is gone?" % s_d_o + assert os.path.isdir(s_d_o), f"{s_d_o} is gone?" try: test.rmdir(['sub']) @@ -1635,21 +1630,21 @@ else: raise Exception("did not catch expected SConsEnvironmentError") - assert os.path.isdir(s_d_o), "%s is gone?" % s_d_o + assert os.path.isdir(s_d_o), f"{s_d_o} is gone?" test.rmdir(['sub', 'dir', 'one']) - assert not os.path.exists(s_d_o), "%s exists?" % s_d_o - assert os.path.isdir(s_d), "%s is gone?" % s_d + assert not os.path.exists(s_d_o), f"{s_d_o} exists?" + assert os.path.isdir(s_d), f"{s_d} is gone?" test.rmdir(['sub', 'dir']) - assert not os.path.exists(s_d), "%s exists?" % s_d - assert os.path.isdir(s), "%s is gone?" % s + assert not os.path.exists(s_d), f"{s_d} exists?" + assert os.path.isdir(s), f"{s} is gone?" test.rmdir('sub') - assert not os.path.exists(s), "%s exists?" % s + assert not os.path.exists(s), f"{s} exists?" @@ -1854,7 +1849,7 @@ o = sys.stdout.getvalue() assert o == '', o e = sys.stderr.getvalue() - expect = 'python "%s" "arg1 arg2"\n' % t.script_path + expect = f'python "{t.script_path}" "arg1 arg2\"\n' assert expect == e, (expect, e) testx = TestCmd.TestCmd(program = t.scriptx, @@ -1863,7 +1858,7 @@ with closing(StringIO()) as sys.stdout, closing(StringIO()) as sys.stderr: testx.run(arguments = ['arg1 arg2']) - expect = '"%s" "arg1 arg2"\n' % t.scriptx_path + expect = f'"{t.scriptx_path}" "arg1 arg2\"\n' o = sys.stdout.getvalue() assert o == '', o e = sys.stderr.getvalue() @@ -1907,7 +1902,7 @@ o = sys.stdout.getvalue() assert expect == o, (expect, o) - expect = 'python "%s" "arg1 arg2"\n' % t.script_path + expect = f'python "{t.script_path}" "arg1 arg2\"\n' e = sys.stderr.getvalue() assert e == expect, (e, expect) @@ -1926,7 +1921,7 @@ o = sys.stdout.getvalue() assert expect == o, (expect, o) - expect = '"%s" "arg1 arg2"\n' % t.scriptx_path + expect = f'"{t.scriptx_path}" "arg1 arg2\"\n' e = sys.stderr.getvalue() assert e == expect, (e, expect) @@ -1947,7 +1942,7 @@ assert expect == o, (expect, o) e = sys.stderr.getvalue() - expect = 'python "%s" "arg1 arg2"\n' % t.scriptout_path + expect = f'python "{t.scriptout_path}" "arg1 arg2\"\n' assert e == expect, (e, expect) test = TestCmd.TestCmd(program = t.scriptout, @@ -1966,7 +1961,7 @@ assert expect == o, (expect, o) e = sys.stderr.getvalue() - expect = 'python "%s" "arg1 arg2"\n' % t.scriptout_path + expect = f'python "{t.scriptout_path}" "arg1 arg2\"\n' assert e == expect, (e, expect) # Test letting TestCmd() pick up verbose = 2 from the environment. 
@@ -1988,7 +1983,7 @@ o = sys.stdout.getvalue() assert expect == o, (expect, o) - expect = 'python "%s" "arg1 arg2"\n' % t.script_path + expect = f'python "{t.script_path}" "arg1 arg2\"\n' e = sys.stderr.getvalue() assert e == expect, (e, expect) @@ -2006,7 +2001,7 @@ o = sys.stdout.getvalue() assert expect == o, (expect, o) - expect = '"%s" "arg1 arg2"\n' % t.scriptx_path + expect = f'"{t.scriptx_path}" "arg1 arg2\"\n' e = sys.stderr.getvalue() assert e == expect, (e, expect) @@ -2024,7 +2019,7 @@ o = sys.stdout.getvalue() assert o == '', o e = sys.stderr.getvalue() - expect = 'python "%s" "arg1 arg2"\n' % t.script_path + expect = f'python "{t.script_path}" "arg1 arg2\"\n' assert expect == e, (expect, e) testx = TestCmd.TestCmd(program = t.scriptx, @@ -2033,7 +2028,7 @@ with closing(StringIO()) as sys.stdout, closing(StringIO()) as sys.stderr: testx.run(arguments = ['arg1 arg2']) - expect = '"%s" "arg1 arg2"\n' % t.scriptx_path + expect = f'"{t.scriptx_path}" "arg1 arg2\"\n' o = sys.stdout.getvalue() assert o == '', o e = sys.stderr.getvalue() @@ -2050,21 +2045,20 @@ class set_diff_function_TestCase(TestCmdTestCase): def test_set_diff_function(self): """Test set_diff_function()""" - self.popen_python(r"""import sys -sys.path = [r'%s'] + sys.path + self.popen_python(fr"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd() test.diff("a\n", "a\n") test.set_diff_function('diff_re') test.diff(".\n", "a\n") sys.exit(0) -""" % self.orig_cwd) +""") def test_set_diff_function_stdout(self): """Test set_diff_function(): stdout""" - self.popen_python("""\ -import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd() print("diff:") @@ -2077,7 +2071,7 @@ print("diff_stdout:") test.diff_stdout(".\\n", "a\\n") sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ diff: diff_stdout: @@ -2091,9 +2085,8 @@ def test_set_diff_function_stderr(self): """Test set_diff_function(): stderr """ - self.popen_python("""\ -import sys -sys.path = [r'%s'] + sys.path + self.popen_python(f"""import sys +sys.path = [r'{self.orig_cwd}'] + sys.path import TestCmd test = TestCmd.TestCmd() print("diff:") @@ -2106,7 +2099,7 @@ print("diff_stderr:") test.diff_stderr(".\\n", "a\\n") sys.exit(0) -""" % self.orig_cwd, +""", stdout="""\ diff: diff_stderr: @@ -2173,13 +2166,13 @@ test.sleep() end = time.perf_counter() diff = end - start - assert diff > 0.9, "only slept %f seconds (start %f, end %f), not default" % (diff, start, end) + assert diff > 0.9, f"only slept {diff:f} seconds (start {start:f}, end {end:f}), not default" start = time.perf_counter() test.sleep(3) end = time.perf_counter() diff = end - start - assert diff > 2.9, "only slept %f seconds (start %f, end %f), not 3" % (diff, start, end) + assert diff > 2.9, f"only slept {diff:f} seconds (start {start:f}, end {end:f}), not 3" @@ -2208,7 +2201,8 @@ except IndexError: pass else: - raise IndexError("got unexpected output:\n" + output) + if output is not None: + raise IndexError(f"got unexpected output:\n{output}") test.program_set('run1') test.run(arguments = 'foo bar') test.program_set('run2') @@ -2293,8 +2287,7 @@ t.recv_script = 'script_recv' t.recv_script_path = t.run_env.workpath(t.sub_dir, t.recv_script) t.recv_out_path = t.run_env.workpath('script_recv.out') - text = """\ -import os + text = f"""import os import sys class Unbuffered: @@ -2311,19 +2304,26 @@ sys.stdout.write('script_recv: STDOUT\\n') 
sys.stderr.write('script_recv: STDERR\\n') -with open(r'%s', 'wb') as logfp: - while 1: +with open(r'{t.recv_out_path}', 'w') as logfp: + while True: line = sys.stdin.readline() if not line: break logfp.write('script_recv: ' + line) sys.stdout.write('script_recv: STDOUT: ' + line) sys.stderr.write('script_recv: STDERR: ' + line) -""" % t.recv_out_path +""" t.run_env.write(t.recv_script_path, text) os.chmod(t.recv_script_path, 0o644) # XXX UNIX-specific return t + def _cleanup(self, popen): + """Quiet Python ResourceWarning after wait()""" + if popen.stdout: + popen.stdout.close() + if popen.stderr: + popen.stderr.close() + def test_start(self): """Test start()""" @@ -2343,6 +2343,7 @@ self.run_match(p.stderr.read(), t.script, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) p = test.start(arguments='arg1 arg2 arg3') self.run_match(p.stdout.read(), t.script, "STDOUT", t.workdir, @@ -2350,6 +2351,7 @@ self.run_match(p.stderr.read(), t.script, "STDERR", t.workdir, repr(['arg1', 'arg2', 'arg3'])) p.wait() + self._cleanup(p) p = test.start(program=t.scriptx, arguments='foo') self.run_match(p.stdout.read(), t.scriptx, "STDOUT", t.workdir, @@ -2357,6 +2359,7 @@ self.run_match(p.stderr.read(), t.scriptx, "STDERR", t.workdir, repr(['foo'])) p.wait() + self._cleanup(p) p = test.start(program=t.script1, interpreter=['python', '-x']) self.run_match(p.stdout.read(), t.script1, "STDOUT", t.workdir, @@ -2364,9 +2367,11 @@ self.run_match(p.stderr.read(), t.script1, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) p = test.start(program='no_script', interpreter='python') status = p.wait() + self._cleanup(p) assert status is not None, status try: @@ -2378,6 +2383,7 @@ pass else: status = p.wait() + self._cleanup(p) # Python versions that use os.popen3() or the Popen3 # class run things through the shell, which just returns # a non-zero exit status. @@ -2393,6 +2399,7 @@ self.run_match(p.stderr.read(), t.scriptx, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) p = testx.start(arguments='foo bar') self.run_match(p.stdout.read(), t.scriptx, "STDOUT", t.workdir, @@ -2400,6 +2407,7 @@ self.run_match(p.stderr.read(), t.scriptx, "STDERR", t.workdir, repr(['foo', 'bar'])) p.wait() + self._cleanup(p) p = testx.start(program=t.script, interpreter='python', arguments='bar') self.run_match(p.stdout.read(), t.script, "STDOUT", t.workdir, @@ -2407,6 +2415,7 @@ self.run_match(p.stderr.read(), t.script, "STDERR", t.workdir, repr(['bar'])) p.wait() + self._cleanup(p) p = testx.start(program=t.script1, interpreter=('python', '-x')) self.run_match(p.stdout.read(), t.script1, "STDOUT", t.workdir, @@ -2414,6 +2423,7 @@ self.run_match(p.stderr.read(), t.script1, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) s = os.path.join('.', t.scriptx) p = testx.start(program=[s]) @@ -2422,6 +2432,7 @@ self.run_match(p.stderr.read(), t.scriptx, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) try: testx.start(program='no_program') @@ -2437,6 +2448,7 @@ # we can wait() for it. 
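The _cleanup() helper introduced in this hunk exists only to close the child's pipe handles once a start()-ed process has been waited for; without that, the unclosed pipe file objects show up as ResourceWarnings when Python's warnings are enabled (for example under python -X dev). Outside the framework the same hygiene, sketched with nothing but the standard library, is just:

import subprocess
import sys

p = subprocess.Popen(
    [sys.executable, "-c", "import sys; print('out'); print('err', file=sys.stderr)"],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
out = p.stdout.read()
err = p.stderr.read()
p.wait()
# Close the pipes explicitly so nothing is left for the garbage collector
# to warn about once p goes away.
if p.stdout:
    p.stdout.close()
if p.stderr:
    p.stderr.close()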
try: p = p.wait() + self._cleanup(p) except OSError: pass @@ -2450,6 +2462,7 @@ self.run_match(p.stderr.read(), t.script1, "STDERR", t.workdir, repr([])) p.wait() + self._cleanup(p) finally: os.chdir(t.orig_cwd) @@ -2588,19 +2601,19 @@ p.stdin.write(to_bytes(input)) p.stdin.close() p.wait() - with open(t.recv_out_path, 'rb') as f: + with open(t.recv_out_path, 'r') as f: result = to_str(f.read()) - expect = 'script_recv: ' + input - assert result == expect, "Result:[%s] should match\nExpected:[%s]" % (result, expect) + expect = f"script_recv: {input}" + assert result == expect, f"Result:[{result}] should match\nExpected:[{expect}]" p = test.start(stdin=1) input = 'send() input to the receive script\n' p.send(input) p.stdin.close() p.wait() - with open(t.recv_out_path, 'rb') as f: + with open(t.recv_out_path, 'r') as f: result = to_str(f.read()) - expect = 'script_recv: ' + input + expect = f"script_recv: {input}" assert result == expect, repr(result) finally: @@ -2660,7 +2673,7 @@ assert stderr == expect_stderr, stderr with open(t.recv_out_path, 'rb') as f: result = f.read() - expect = ('script_recv: ' + input) * 2 + expect = f"script_recv: {input}" * 2 assert result == expect, (result, stdout, stderr) finally: @@ -2699,13 +2712,15 @@ def test_stdout(self): """Test stdout()""" run_env = TestCmd.TestCmd(workdir = '') - run_env.write('run1', """import sys + run_env.write('run1', """\ +import sys sys.stdout.write("run1 STDOUT %s\\n" % sys.argv[1:]) sys.stdout.write("run1 STDOUT second line\\n") sys.stderr.write("run1 STDERR %s\\n" % sys.argv[1:]) sys.stderr.write("run1 STDERR second line\\n") """) - run_env.write('run2', """import sys + run_env.write('run2', """\ +import sys sys.stdout.write("run2 STDOUT %s\\n" % sys.argv[1:]) sys.stdout.write("run2 STDOUT second line\\n") sys.stderr.write("run2 STDERR %s\\n" % sys.argv[1:]) @@ -2717,7 +2732,7 @@ test = TestCmd.TestCmd(interpreter = 'python', workdir = '') output = test.stdout() if output is not None: - raise IndexError("got unexpected output:\n\t`%s'\n" % output) + raise IndexError(f"got unexpected output:\n\t`{output}'\n") test.program_set('run1') test.run(arguments = 'foo bar') test.program_set('run2') @@ -2772,11 +2787,9 @@ class symlink_TestCase(TestCmdTestCase): + @unittest.skipIf(sys.platform == 'win32', "Skip symlink test on win32") def test_symlink(self): """Test symlink()""" - try: os.symlink - except AttributeError: return - test = TestCmd.TestCmd(workdir = '', subdir = 'foo') wdir_file1 = os.path.join(test.workdir, 'file1') wdir_target1 = os.path.join(test.workdir, 'target1') @@ -2847,7 +2860,7 @@ class timeout_TestCase(TestCmdTestCase): def test_initialization(self): - """Test initialization timeout""" + """Test initializating a TestCmd with a timeout""" test = TestCmd.TestCmd(workdir='', timeout=2) test.write('sleep.py', timeout_script) @@ -2881,40 +2894,39 @@ test = TestCmd.TestCmd(workdir='', timeout=8) test.write('sleep.py', timeout_script) - test.run([sys.executable, test.workpath('sleep.py'), '2'], - timeout=4) + test.run([sys.executable, test.workpath('sleep.py'), '2'], timeout=4) assert test.stderr() == '', test.stderr() assert test.stdout() == 'sleeping 2\nslept 2\n', test.stdout() - test.run([sys.executable, test.workpath('sleep.py'), '6'], - timeout=4) + test.run([sys.executable, test.workpath('sleep.py'), '6'], timeout=4) assert test.stderr() == '', test.stderr() assert test.stdout() == 'sleeping 6\n', test.stdout() - def test_set_timeout(self): - """Test set_timeout()""" - test = TestCmd.TestCmd(workdir='', timeout=2) 
- test.write('sleep.py', timeout_script) - - test.run([sys.executable, test.workpath('sleep.py'), '4']) - assert test.stderr() == '', test.stderr() - assert test.stdout() == 'sleeping 4\n', test.stdout() - - test.set_timeout(None) - - test.run([sys.executable, test.workpath('sleep.py'), '4']) - assert test.stderr() == '', test.stderr() - assert test.stdout() == 'sleeping 4\nslept 4\n', test.stdout() - - test.set_timeout(6) - - test.run([sys.executable, test.workpath('sleep.py'), '4']) - assert test.stderr() == '', test.stderr() - assert test.stdout() == 'sleeping 4\nslept 4\n', test.stdout() - - test.run([sys.executable, test.workpath('sleep.py'), '8']) - assert test.stderr() == '', test.stderr() - assert test.stdout() == 'sleeping 8\n', test.stdout() + # This method has been removed + #def test_set_timeout(self): + # """Test set_timeout()""" + # test = TestCmd.TestCmd(workdir='', timeout=2) + # test.write('sleep.py', timeout_script) + # + # test.run([sys.executable, test.workpath('sleep.py'), '4']) + # assert test.stderr() == '', test.stderr() + # assert test.stdout() == 'sleeping 4\n', test.stdout() + # + # test.set_timeout(None) + # + # test.run([sys.executable, test.workpath('sleep.py'), '4']) + # assert test.stderr() == '', test.stderr() + # assert test.stdout() == 'sleeping 4\nslept 4\n', test.stdout() + # + # test.set_timeout(6) + # + # test.run([sys.executable, test.workpath('sleep.py'), '4']) + # assert test.stderr() == '', test.stderr() + # assert test.stdout() == 'sleeping 4\nslept 4\n', test.stdout() + # + # test.run([sys.executable, test.workpath('sleep.py'), '8']) + # assert test.stderr() == '', test.stderr() + # assert test.stdout() == 'sleeping 8\n', test.stdout() @@ -3109,17 +3121,14 @@ assert wpath == os.path.join(test.workdir, 'foo', 'bar') -@unittest.skipIf(sys.platform == 'win32', "Don't run on win32") class readable_TestCase(TestCmdTestCase): + @unittest.skipIf(sys.platform == 'win32', "Skip permission fiddling on win32") def test_readable(self): """Test readable()""" test = TestCmd.TestCmd(workdir = '', subdir = 'foo') test.write('file1', "Test file #1\n") test.write(['foo', 'file2'], "Test file #2\n") - - try: symlink = os.symlink - except AttributeError: pass - else: symlink('no_such_file', test.workpath('dangling_symlink')) + os.symlink('no_such_file', test.workpath('dangling_symlink')) test.readable(test.workdir, 0) # XXX skip these tests if euid == 0? @@ -3152,15 +3161,13 @@ class writable_TestCase(TestCmdTestCase): + @unittest.skipIf(sys.platform == 'win32', "Skip permission fiddling on win32") def test_writable(self): """Test writable()""" test = TestCmd.TestCmd(workdir = '', subdir = 'foo') test.write('file1', "Test file #1\n") test.write(['foo', 'file2'], "Test file #2\n") - - try: symlink = os.symlink - except AttributeError: pass - else: symlink('no_such_file', test.workpath('dangling_symlink')) + os.symlink('no_such_file', test.workpath('dangling_symlink')) test.writable(test.workdir, 0) # XXX skip these tests if euid == 0? 
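Alongside the permission tests, these hunks swap the old runtime probe (try: os.symlink / except AttributeError: return) for declarative unittest.skipIf decorators keyed on sys.platform, moved from class level onto the individual test methods, so the skip and its reason are visible in the test report instead of the test silently passing. A generic, self-contained illustration of that pattern (the class and method names here are invented, not taken from the patch):

import os
import sys
import tempfile
import unittest

class SymlinkBehaviorTest(unittest.TestCase):
    @unittest.skipIf(sys.platform == 'win32', "Skip symlink test on win32")
    def test_dangling_symlink(self):
        workdir = tempfile.mkdtemp()
        link = os.path.join(workdir, 'dangling_symlink')
        os.symlink('no_such_file', link)        # a dangling target is allowed
        self.assertTrue(os.path.islink(link))
        self.assertFalse(os.path.exists(link))  # exists() follows the link

if __name__ == '__main__':
    unittest.main()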
@@ -3189,17 +3196,14 @@ assert not _is_writable(test.workpath('file1')) -@unittest.skipIf(sys.platform == 'win32', "Don't run on win32") class executable_TestCase(TestCmdTestCase): + @unittest.skipIf(sys.platform == 'win32', "Skip permission fiddling on win32") def test_executable(self): """Test executable()""" test = TestCmd.TestCmd(workdir = '', subdir = 'foo') test.write('file1', "Test file #1\n") test.write(['foo', 'file2'], "Test file #2\n") - - try: symlink = os.symlink - except AttributeError: pass - else: symlink('no_such_file', test.workpath('dangling_symlink')) + os.symlink('no_such_file', test.workpath('dangling_symlink')) def make_executable(fname): st = os.stat(fname) diff -Nru scons-4.4.0+dfsg/testing/framework/TestCommon.py scons-4.5.2+dfsg/testing/framework/TestCommon.py --- scons-4.4.0+dfsg/testing/framework/TestCommon.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestCommon.py 2023-03-21 16:17:04.000000000 +0000 @@ -319,10 +319,10 @@ file_contents = self.read(file, mode) if not contains(file_contents, required, find): - print("File `%s' does not contain required string." % file) + print(f"File `{file}' does not contain required string.") print(self.banner('Required string ')) print(required) - print(self.banner('%s contents ' % file)) + print(self.banner(f'{file} contents ')) print(file_contents) self.fail_test() @@ -343,9 +343,9 @@ if not contains(output, input, find): if title is None: title = 'output' - print('Missing expected input from {}:'.format(title)) + print(f'Missing expected input from {title}:') print(input) - print(self.banner(title + ' ')) + print(self.banner(f"{title} ")) print(output) self.fail_test() @@ -367,10 +367,10 @@ if missing: if title is None: title = 'output' - sys.stdout.write("Missing expected lines from %s:\n" % title) + sys.stdout.write(f"Missing expected lines from {title}:\n") for line in missing: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ') + '\n') + sys.stdout.write(f" {repr(line)}\n") + sys.stdout.write(f"{self.banner(f'{title} ')}\n") sys.stdout.write(output) self.fail_test() @@ -394,10 +394,10 @@ if counts: if title is None: title = 'output' - sys.stdout.write("Unexpected number of lines from %s:\n" % title) + sys.stdout.write(f"Unexpected number of lines from {title}:\n") for line in counts: - sys.stdout.write(' ' + repr(line) + ": found " + str(counts[line]) + '\n') - sys.stdout.write(self.banner(title + ' ') + '\n') + sys.stdout.write(f" {repr(line)}: found {str(counts[line])}\n") + sys.stdout.write(f"{self.banner(f'{title} ')}\n") sys.stdout.write(output) self.fail_test() @@ -418,10 +418,10 @@ if title is None: title = 'output' - sys.stdout.write("Missing any expected line from %s:\n" % title) + sys.stdout.write(f"Missing any expected line from {title}:\n") for line in lines: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ') + '\n') + sys.stdout.write(f" {repr(line)}\n") + sys.stdout.write(f"{self.banner(f'{title} ')}\n") sys.stdout.write(output) self.fail_test() @@ -461,15 +461,15 @@ if title is None: title = 'output' if missing: - sys.stdout.write("Missing expected lines from %s:\n" % title) + sys.stdout.write(f"Missing expected lines from {title}:\n") for line in missing: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner('Missing %s ' % title) + '\n') + sys.stdout.write(f" {repr(line)}\n") + sys.stdout.write(f"{self.banner(f'Missing {title} ')}\n") if out: - sys.stdout.write("Extra unexpected 
lines from %s:\n" % title) + sys.stdout.write(f"Extra unexpected lines from {title}:\n") for line in out: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner('Extra %s ' % title) + '\n') + sys.stdout.write(f" {repr(line)}\n") + sys.stdout.write(f"{self.banner(f'Extra {title} ')}\n") sys.stdout.flush() self.fail_test() @@ -523,20 +523,41 @@ except KeyboardInterrupt: raise except: - print("Unexpected contents of `%s'" % file) + print(f"Unexpected contents of `{file}'") self.diff(expect, file_contents, 'contents ') raise + def must_match_file(self, file, golden_file, mode='rb', match=None, message=None, newline=None): + """Matches the contents of the specified file (first argument) + against the expected contents (second argument). The expected + contents are a list of lines or a string which will be split + on newlines. + """ + file_contents = self.read(file, mode, newline) + golden_file_contents = self.read(golden_file, mode, newline) + + if not match: + match = self.match + + try: + self.fail_test(not match(to_str(file_contents), to_str(golden_file_contents)), message=message) + except KeyboardInterrupt: + raise + except: + print("Unexpected contents of `%s'" % file) + self.diff(golden_file_contents, file_contents, 'contents ') + raise + def must_not_contain(self, file, banned, mode = 'rb', find = None): """Ensures that the specified file doesn't contain the banned text. """ file_contents = self.read(file, mode) if contains(file_contents, banned, find): - print("File `%s' contains banned string." % file) + print(f"File `{file}' contains banned string.") print(self.banner('Banned string ')) print(banned) - print(self.banner('%s contents ' % file)) + print(self.banner(f'{file} contents ')) print(file_contents) self.fail_test() @@ -559,10 +580,10 @@ if unexpected: if title is None: title = 'output' - sys.stdout.write("Unexpected lines in %s:\n" % title) + sys.stdout.write(f"Unexpected lines in {title}:\n") for line in unexpected: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ') + '\n') + sys.stdout.write(f" {repr(line)}\n") + sys.stdout.write(f"{self.banner(f'{title} ')}\n") sys.stdout.write(output) self.fail_test() @@ -606,7 +627,7 @@ Exits FAILED if the file doesn't exist or is empty. 
""" if not (os.path.exists(file) or os.path.islink(file)): - print("File doesn't exist: `%s'" % file) + print(f"File doesn't exist: `{file}'") self.fail_test(file) try: @@ -615,7 +636,7 @@ fsize = 0 if fsize == 0: - print("File is empty: `%s'" % file) + print(f"File is empty: `{file}'") self.fail_test(file) def must_not_be_writable(self, *files): @@ -643,8 +664,8 @@ if _failed(self, status): expect = '' if status != 0: - expect = " (expected %s)" % str(status) - print("%s returned %s%s" % (self.program, _status(self), expect)) + expect = f" (expected {str(status)})" + print(f"{self.program} returned {_status(self)}{expect}") print(self.banner('STDOUT ')) print(actual_stdout) print(self.banner('STDERR ')) @@ -676,7 +697,7 @@ """ arguments = self.options_arguments(options, arguments) try: - return TestCmd.start(self, program, interpreter, arguments, + return super().start(program, interpreter, arguments, universal_newlines, **kw) except KeyboardInterrupt: raise @@ -692,7 +713,7 @@ except IndexError: pass cmd_args = self.command_args(program, interpreter, arguments) - sys.stderr.write('Exception trying to execute: %s\n' % cmd_args) + sys.stderr.write(f'Exception trying to execute: {cmd_args}\n') raise e def finish(self, popen, stdout = None, stderr = '', status = 0, **kw): @@ -713,7 +734,7 @@ command. A value of None means don't test exit status. """ - TestCmd.finish(self, popen, **kw) + super().finish(popen, **kw) match = kw.get('match', self.match) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) @@ -750,7 +771,7 @@ del kw['match'] except KeyError: match = self.match - TestCmd.run(self, **kw) + super().run(**kw) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) @@ -796,6 +817,22 @@ # so this is an Aegis invocation; pass the test (exit 0). self.pass_test() + @staticmethod + def detailed_diff(value, expect): + v_split = value.split('\n') + e_split = expect.split('\n') + if len(v_split) != len(e_split): + print(f"different number of lines:{len(v_split)} {len(e_split)}") + + # breakpoint() + for v, e in zip(v_split, e_split): + # print("%s:%s"%(v,e)) + if v != e: + print(f"\n[{v}]\n[{e}]") + + return f"Expected:\n{expect}\nGot:\n{value}" + + # Local Variables: # tab-width:4 # indent-tabs-mode:nil diff -Nru scons-4.4.0+dfsg/testing/framework/TestCommonTests.py scons-4.5.2+dfsg/testing/framework/TestCommonTests.py --- scons-4.4.0+dfsg/testing/framework/TestCommonTests.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestCommonTests.py 2023-03-21 16:17:04.000000000 +0000 @@ -19,14 +19,12 @@ # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. -__author__ = "Steven Knight " -__revision__ = "TestCommonTests.py 1.3.D001 2010/06/03 12:58:27 knight" - import os import re import signal import sys import unittest +from textwrap import dedent # Strip the current directory so we get the right TestCommon.py module. 
sys.path = sys.path[1:] @@ -34,15 +32,9 @@ import TestCmd import TestCommon +# this used to be a custom function, now use the stdlib equivalent def lstrip(s): - lines = [ _.expandtabs() for _ in s.split('\n') ] - if lines[0] == '': - lines = lines[1:] - spaces = len(re.match('^( *).*', lines[0]).group(1)) - if spaces: - lines = [ l[spaces:] for l in lines ] - return '\n'.join(lines) - + return dedent(s) expected_newline = '\\n' @@ -52,17 +44,16 @@ expect = expect.pattern except AttributeError: pass - result = [ + display = [ '\n', - 'EXPECTED'+('*'*80) + '\n', + f"{'EXPECTED: ':*<80}\n", expect, - 'GOT'+('*'*80) + '\n', + f"{'GOT: ':*<80}\n", result, - ('*'*80) + '\n', + '' if error is None else error, + f"{'':*<80}\n", ] - if error: - result.append(error) - return ''.join(result) + return ''.join(display) class TestCommonTestCase(unittest.TestCase): @@ -355,7 +346,7 @@ """) run_env.run(program=sys.executable, stdin=script) stdout = run_env.stdout() - assert stdout == expect, "got:\n%s\nexpected:\n%s"%(stdout, expect) + assert stdout == expect, f"got:\n{stdout}\nexpected:\n{expect}" stderr = run_env.stderr() assert stderr.find("FAILED") != -1, stderr @@ -987,6 +978,7 @@ stderr = run_env.stderr() assert stderr == "PASSED\n", stderr + @unittest.skipIf(sys.platform == 'win32', "Skip symlink test on win32") def test_broken_link(self) : """Test must_exist(): exists but it is a broken link""" run_env = self.run_env @@ -1334,7 +1326,7 @@ """) run_env.run(program=sys.executable, stdin=script) stdout = run_env.stdout() - assert stdout == expect, "\ngot:\n%s\nexpected:\n%s" % (stdout, expect) + assert stdout == expect, f"\ngot:\n{stdout}\nexpected:\n{expect}" stderr = run_env.stderr() assert stderr.find("FAILED") != -1, stderr @@ -1359,7 +1351,7 @@ """) run_env.run(program=sys.executable, stdin=script) stdout = run_env.stdout() - assert stdout == expect, "\ngot:\n%s\nexpected:\n%s" % (stdout, expect) + assert stdout == expect, f"\ngot:\n{stdout}\nexpected:\n{expect}" stderr = run_env.stderr() assert stderr.find("FAILED") != -1, stderr @@ -1660,6 +1652,7 @@ stderr = run_env.stderr() assert stderr == "PASSED\n", stderr + @unittest.skipIf(sys.platform == 'win32', "Skip symlink test on win32") def test_existing_broken_link(self): """Test must_not_exist(): exists but it is a broken link""" run_env = self.run_env @@ -1886,7 +1879,7 @@ FAILED test of .*fail \\tat line \\d+ of .*TestCommon\\.py \\(_complete\\) \\tfrom line \\d+ of .*TestCommon\\.py \\(run\\) - \\tfrom line \\d+ of ( \(\))? + \\tfrom line \\d+ of ( \\(\\))? 
""") expect_stderr = re.compile(expect_stderr, re.M) @@ -1939,23 +1932,24 @@ STDOUT ========================================================================= None STDERR ========================================================================= + None """) - expect_stderr = lstrip("""\ - Exception trying to execute: \\[%s, '[^']*pass'\\] - Traceback \\(most recent call last\\): - File "", line \\d+, in (\\?|) - File "[^"]+TestCommon.py", line \\d+, in run - TestCmd.run\\(self, \\*\\*kw\\) - File "[^"]+TestCmd.py", line \\d+, in run - .* - File "[^"]+TestCommon.py", line \\d+, in start - raise e - File "[^"]+TestCommon.py", line \\d+, in start - return TestCmd.start\\(self, program, interpreter, arguments, - File "", line \\d+, in raise_exception - TypeError: forced TypeError - """ % re.escape(repr(sys.executable))) + expect_stderr = lstrip( + fr"""Exception trying to execute: \[{re.escape(repr(sys.executable))}, '[^']*pass'\] +Traceback \(most recent call last\): + File "", line \d+, in (\?|) + File "[^"]+TestCommon.py", line \d+, in run + super\(\).run\(\*\*kw\) + File "[^"]+TestCmd.py", line \d+, in run + p = self.start\(program=program, +(?:\s*\^*\s)? File \"[^\"]+TestCommon.py\", line \d+, in start + raise e + File "[^"]+TestCommon.py", line \d+, in start + return super\(\).start\(program, interpreter, arguments, +(?:\s*\^*\s)? File \"\", line \d+, in raise_exception +TypeError: forced TypeError +""") expect_stderr = re.compile(expect_stderr, re.M) self.run_execution_test(script, expect_stdout, expect_stderr) @@ -2072,7 +2066,7 @@ FAILED test of .*pass \\tat line \\d+ of .*TestCommon\\.py \\(_complete\\) \\tfrom line \\d+ of .*TestCommon\\.py \\(run\\) - \\tfrom line \\d+ of ( \(\))? + \\tfrom line \\d+ of ( \\(\\))? """) expect_stderr = re.compile(expect_stderr, re.M) @@ -2102,7 +2096,7 @@ FAILED test of .*fail \\tat line \\d+ of .*TestCommon\\.py \\(_complete\\) \\tfrom line \\d+ of .*TestCommon\\.py \\(run\\) - \\tfrom line \\d+ of ( \(\))? + \\tfrom line \\d+ of ( \\(\\))? """) expect_stderr = re.compile(expect_stderr, re.M) @@ -2134,7 +2128,7 @@ FAILED test of .*pass \\tat line \\d+ of .*TestCommon\\.py \\(_complete\\) \\tfrom line \\d+ of .*TestCommon\\.py \\(run\\) - \\tfrom line \\d+ of ( \(\))? + \\tfrom line \\d+ of ( \\(\\))? """) expect_stderr = re.compile(expect_stderr, re.M) @@ -2168,7 +2162,7 @@ FAILED test of .*stderr \\tat line \\d+ of .*TestCommon\\.py \\(_complete\\) \\tfrom line \\d+ of .*TestCommon\\.py \\(run\\) - \\tfrom line \\d+ of ( \(\))? + \\tfrom line \\d+ of ( \\(\\))? """) expect_stderr = re.compile(expect_stderr, re.M) @@ -2222,11 +2216,11 @@ tc.run() """) - self.SIGTERM = int(signal.SIGTERM) + self.SIGTERM = f"{'' if sys.platform == 'win32' else '-'}{signal.SIGTERM}" # Script returns the signal value as a negative number. 
expect_stdout = lstrip("""\ - %(signal_script)s returned -%(SIGTERM)s + %(signal_script)s returned %(SIGTERM)s STDOUT ========================================================================= STDERR ========================================================================= @@ -2389,13 +2383,13 @@ ] script = "import TestCommon\n" + \ - '\n'.join([ "print(TestCommon.%s)\n" % v for v in variables ]) + '\n'.join([f"print(TestCommon.{v})\n" for v in variables]) run_env.run(program=sys.executable, stdin=script) stderr = run_env.stderr() assert stderr == "", stderr script = "from TestCommon import *\n" + \ - '\n'.join([ "print(%s)" % v for v in variables ]) + '\n'.join([f"print({v})" for v in variables]) run_env.run(program=sys.executable, stdin=script) stderr = run_env.stderr() assert stderr == "", stderr @@ -2403,38 +2397,8 @@ if __name__ == "__main__": - tclasses = [ - __init__TestCase, - banner_TestCase, - must_be_writable_TestCase, - must_contain_TestCase, - must_contain_all_lines_TestCase, - must_contain_any_line_TestCase, - must_contain_exactly_lines_TestCase, - must_contain_lines_TestCase, - must_exist_TestCase, - must_exist_one_of_TestCase, - must_match_TestCase, - must_not_be_writable_TestCase, - must_not_contain_TestCase, - must_not_contain_any_line_TestCase, - must_not_contain_lines_TestCase, - must_not_exist_TestCase, - must_not_exist_any_of_TestCase, - must_not_be_empty_TestCase, - run_TestCase, - start_TestCase, - skip_test_TestCase, - variables_TestCase, - ] - suite = unittest.TestSuite() - for tclass in tclasses: - loader = unittest.TestLoader() - loader.testMethodPrefix = 'test_' - names = loader.getTestCaseNames(tclass) - suite.addTests([tclass(n) for n in names]) - if not unittest.TextTestRunner().run(suite).wasSuccessful(): - sys.exit(1) + unittest.main() + # Local Variables: # tab-width:4 diff -Nru scons-4.4.0+dfsg/testing/framework/test-framework.rst scons-4.5.2+dfsg/testing/framework/test-framework.rst --- scons-4.4.0+dfsg/testing/framework/test-framework.rst 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/test-framework.rst 2023-03-21 16:17:04.000000000 +0000 @@ -61,7 +61,7 @@ End-to-end tests are by their nature harder to debug. You can drop straight into the Python debugger on the unit test scripts by using the ``runtest.py --pdb`` option, but the end-to-end -tests treat an SCons invocation as a "black box" and just look for +tests treat an SCons invocation as a *black box* and just look for external effects; simple methods like inserting ``print`` statements in the SCons code itself can disrupt those external effects. See `Debugging end-to-end tests`_ for some more thoughts. @@ -72,7 +72,7 @@ The end-to-end tests, more or less, stick to the following naming conventions: -#. All tests end with a .py suffix. +#. All tests end with a ``.py`` suffix. #. In the *General* form we use ``Feature.py`` @@ -161,7 +161,7 @@ that path-component in the testing directory. The use of an ephemeral test directory means that you can't simply change -into a directory to "debug things" after a test has gone wrong. +into a directory to debug after a test has gone wrong. For a way around this, check out the ``PRESERVE`` environment variable. It can be seen in action in `How to convert old tests to use fixures`_ below. @@ -170,7 +170,7 @@ If you simply want to check which tests would get executed, you can call the ``runtest.py`` script with the ``-l`` option combined with whichever -test finding options (see below) you intend to use. 
Example:: +test selection options (see below) you intend to use. Example:: $ python runtest.py -l test/scons-time @@ -179,8 +179,8 @@ $ python runtest.py -n -a -Finding Tests -============= +Selecting tests +=============== When started in *standard* mode:: @@ -209,11 +209,11 @@ option. -Example End-to-End Test Script +Example End-to-End test script ============================== To illustrate how the end-to-end test scripts work, let's walk through -a simple "Hello, world!" example:: +a simple *Hello, world!* example:: #!python import TestSCons @@ -241,6 +241,8 @@ test.pass_test() +Explanation +----------- ``import TestSCons`` Imports the main infrastructure for writing SCons tests. This is @@ -249,7 +251,7 @@ imported before this line. ``test = TestSCons.TestSCons()`` - This initializes an object for testing. A fair amount happens under + Initializes an object for testing. A fair amount happens under the covers when the object is created, including: * A temporary directory is created for all the in-line files that will @@ -302,8 +304,8 @@ In the simple example above, the files to set up the test are created on the fly by the test program. We give a filename to the ``TestSCons.write()`` -method, and a string holding its contents, and it gets written to the test -directory right before starting.. +method, plus a string holding its contents, and it gets written to the test +directory right before starting. This simple technique can be seen throughout most of the end-to-end tests as it was the original technique provided to test developers, @@ -321,12 +323,12 @@ In testing parlance, a fixture is a repeatable test setup. The SCons test harness allows the use of saved files or directories to be used -in that sense: "the fixture for this test is foo", instead of writing +in that sense: *the fixture for this test is foo*, instead of writing a whole bunch of strings to create files. Since these setups can be reusable across multiple tests, the *fixture* terminology applies well. Note: fixtures must not be treated by SCons as runnable tests. To exclude -them, see instructions in the above section named "Finding Tests". +them, see instructions in the above section named `Selecting tests`_. Directory fixtures ------------------ @@ -390,9 +392,11 @@ Again, a reference example can be found in the current revision of SCons, see ``test/packaging/sandbox-test/sandbox-test.py``. -For even more examples you should check out -one of the external Tools, e.g. the *Qt4* Tool at -https://bitbucket.org/dirkbaechle/scons_qt4. Also visit the SCons Tools +For even more examples you should check out one of the external Tools, +e.g. the *Qt5* Tool at +https://github.com/SCons/scons-contrib/tree/master/sconscontrib/SCons/Tool/qt5. +There are many other tools in the contrib repository, +and you can also visit the SCons Tools Index at https://github.com/SCons/scons/wiki/ToolsIndex for a complete list of available Tools, though not all may have tests yet. 
@@ -453,23 +457,24 @@ import TestSCons _python_ = TestSCons._python_ - test.write('SConstruct', """ + test.write('SConstruct', f""" cc = Environment().Dictionary('CC') env = Environment( - LINK=r'%(_python_)s mylink.py', + LINK=r'{_python_} mylink.py', LINKFLAGS=[], - CC=r'%(_python_)s mycc.py', + CC=r'{_python_} mycc.py', CXX=cc, CXXFLAGS=[], ) env.Program(target='test1', source='test1.c') - """ % locals()) + """ -Here the value of ``_python_`` is picked out of the script's -``locals`` dictionary - which works because we've set it above - -and interpolated using a mapping key into the string that will -be written to ``SConstruct``. A fixture would be hard to use -here because we don't know the value of ``_python_`` until runtime. +Here the value of ``_python_`` from the test program is +pasted in via f-string formatting. A fixture would be hard to use +here because we don't know the value of ``_python_`` until runtime +(also note that as it will be a full pathname, it's entered as a +Python rawstring to avoid interpretation problems on Windows, +where the path separator is a backslash). The other files created in this test may still be candidates for use as fixture files, however. @@ -518,7 +523,7 @@ You can now go to the save directory reported from this run and invoke the test manually to see what it is doing, without the presence of the test infrastructure which would otherwise -"swallow" output you may be interested in. In this case, +consume output you may be interested in. In this case, adding debug prints may be more useful. @@ -528,17 +533,17 @@ The main test API is defined in the ``TestSCons`` class. ``TestSCons`` is a subclass of ``TestCommon``, which is a subclass of ``TestCmd``. All those classes are defined in Python files of the same name -in ``testing/framework``. +in ``testing/framework``. Start in ``testing/framework/TestCmd.py`` for the base API definitions, like how to create files (``test.write()``) and run commands (``test.run()``). Use ``TestSCons`` for the end-to-end tests in ``test``, but use -``TestCmd`` for the unit tests in the ``src`` directory. +``TestCmd`` for the unit tests in the ``SCons`` directory. The match functions work like this: ``TestSCons.match_re`` - match each line with a RE + match each line with an RE * Splits the lines into a list (unless they already are) * splits the REs at newlines (unless already a list) @@ -614,14 +619,84 @@ behavior of the executable. Many examples of this can be found in the ``test`` directory. See for example ``test/subdivide.py``. -This leads to a suggestion for E2E test organization because the framework -doesn't have a way to indicate a partial skip - if you executed -200 lines of test, then found a condition which caused you to skip the -last 20 lines, the whole test is marked as a skip; -it also doesn't have a way to indicate a partial pass. -To improve on this, keep tool tests which don't need the -underlying program in separate files from ones which do - -that way one can see in the test results that the "plumbing" -tests worked even if the the ones using the underlying program -maybe were skipped. +Testing DOs and DONTs +===================== + +There's no question that having to write tests in order to get a change +approved - even an apparently trivial change - does make it a little harder +to contribute to the SCons code base - but the requirement to have features +and bugfixes testable is a necessary part of ensuring SCons quality. 
+Thinking of SCons development in terms of the red/green model from
+Test Driven Development should make things a little easier.
+
+If you are working on an SCons bug, try to come up with a simple
+reproducer first. Bug reports (even your own!) are often like *I tried
+to do this but it surprisingly failed*, and a reproducer is normally an
+``SConstruct`` along with, probably, some supporting files such as source
+files, data files, subsidiary SConscripts, etc. Try to make this example
+as simple and clean as possible. No, this isn't necessarily easy to do,
+but winnowing down what triggers a problem and removing the stuff that
+doesn't actually contribute to triggering the problem is a step that
+lets you (and later readers) more clearly understand what is going on.
+You don't have to turn this into a formal testcase yet, but keep this
+reproducer around, and document with it what you expect to happen,
+and what actually happens. This material will help produce an E2E
+test later, and this is something you *may* be able to get help with,
+if the way the tests are usually written and the test harness proves
+too confusing. With a clean test in hand (make sure it's failing!)
+you can go ahead and code up a fix and make sure it passes with the fix
+in place. Jumping straight to a fix without working on a testcase like
+this will often lead to a disappointing *how do I come up with a test
+so the maintainer will be willing to merge* phase. Asking questions on
+a public forum can be productive here.
+
+E2E-specific Suggestions:
+
+* Do not require the use of an external tool unless necessary.
+  Usually the SCons behavior is the thing we want to test,
+  not the behavior of the external tool. *Necessary* is not a precise term -
+  sometimes it would be too time-consuming to write a script to mock
+  a compiler with an extensive set of options, and sometimes it's
+  not a good idea to assume you know what all those will do vs what
+  the real tool does; there may be other good reasons for just going
+  ahead and calling the external tool.
+* If using an external tool, be prepared to skip the test if it is unavailable.
+* Do not combine tests that need an external tool with ones that
+  do not - divide these into separate test files. There is no concept
+  of partial skip for e2e tests, so if you successfully complete seven
+  of eight tests, and then come to a conditional "skip if tool missing"
+  or "skip if on Windows", and that branch is taken, then the
+  whole test file ends up skipped, and the seven that ran will
+  never be recorded. Some tests follow the convention of creating a
+  second test file with the ending ``-live`` for the part that requires
+  actually running the external tool.
+* In testing, *fail fast* is not always the best policy - if you can think
+  of many scenarios that could go wrong and they are all run linearly in
+  a single test file, then you only hear about the first one that fails.
+  In some cases it may make sense to split them out a bit more, so you
+  can see several fails at once, which may show a helpful failure pattern
+  you wouldn't spot from a single fail.
+* Use test fixtures where it makes sense, and in particular, try to
+  make use of shareable mocked tools, which, by getting lots of use,
+  will be better debugged (that is, don't have each test produce its
+  own ``myfortran.py`` or ``mylex.py`` etc. unless they need drastically
+  different behaviors).
+
+Unittest-specific hints:
+
+- Let the ``unittest`` module help!
Lots of the existing tests just + use a bare ``assert`` call for checks, which works fine, but then + you are responsible for preparing the message if it fails. The base + ``TestCase`` class has methods which know how to display many things, + for example ``self.assertEqual()`` displays in what way the two arguments + differ if they are *not* equal. Checking for am expected exception can + be done with ``self.assertRaises()`` rather than crafting a stub of + code using a try block for this situation. +- The *fail fast* consideration applies here, too: try not to fail a whole + testcase on the first problem, if there are more checks to go. + Again, existing tests may use elaborate tricks for this, but modern + ``unittest`` has a ``subTest`` context manager that can be used to wrap + each distinct piece and not abort the testcase for a failing subtest + (to be fair, this functionality is a recent addition, after most SCons + unit tests were written - but it should be used going forward). diff -Nru scons-4.4.0+dfsg/testing/framework/TestRuntest.py scons-4.5.2+dfsg/testing/framework/TestRuntest.py --- scons-4.4.0+dfsg/testing/framework/TestRuntest.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestRuntest.py 2023-03-21 16:17:04.000000000 +0000 @@ -51,11 +51,7 @@ ] ) -if re.search(r'\s', python): - pythonstring = _python_ -else: - pythonstring = python -pythonstring = pythonstring.replace('\\', '\\\\') +pythonstring = python pythonflags = '' failing_test_template = """\ diff -Nru scons-4.4.0+dfsg/testing/framework/TestSConsMSVS.py scons-4.5.2+dfsg/testing/framework/TestSConsMSVS.py --- scons-4.4.0+dfsg/testing/framework/TestSConsMSVS.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestSConsMSVS.py 2023-03-21 16:17:04.000000000 +0000 @@ -690,7 +690,7 @@ orig = 'sys.path = [ join(sys' enginepath = repr(os.path.join(self._cwd, '..', 'engine')) - replace = 'sys.path = [ %s, join(sys' % enginepath + replace = f'sys.path = [ {enginepath}, join(sys' contents = self.read(fname, mode='r') contents = contents.replace(orig, replace) @@ -719,9 +719,9 @@ project_guid = "{B0CC4EE9-0174-51CD-A06A-41D0713E928A}" if 'SCONS_LIB_DIR' in os.environ: - exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % os.environ['SCONS_LIB_DIR'] + exec_script_main = f"from os.path import join; import sys; sys.path = [ r'{os.environ['SCONS_LIB_DIR']}' ] + sys.path; import SCons.Script; SCons.Script.main()" else: - exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%s'), join(sys.prefix, 'scons-%s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % (self.scons_version, self.scons_version) + exec_script_main = f"from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-{self.scons_version}'), join(sys.prefix, 'scons-{self.scons_version}'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" exec_script_main_xml = exec_script_main.replace("'", "'") result = input.replace(r'', workpath) @@ -823,7 +823,7 @@ elif major > 10: return '12.00' else: - raise SCons.Errors.UserError('Received unexpected VC version %s' % vc_version) + raise SCons.Errors.UserError(f'Received unexpected VC version {vc_version}') def 
_get_solution_file_vs_number(self, vc_version): """ @@ -846,7 +846,7 @@ elif major == 14 and minor == 2: return '16' else: - raise SCons.Errors.UserError('Received unexpected VC version %s' % vc_version) + raise SCons.Errors.UserError(f'Received unexpected VC version {vc_version}') def _get_vcxproj_file_tools_version(self, vc_version): """ @@ -877,7 +877,7 @@ # ToolsVersion='17' return '17.0' else: - raise SCons.Errors.UserError('Received unexpected VC version %s' % vc_version) + raise SCons.Errors.UserError(f'Received unexpected VC version {vc_version}') def _get_vcxproj_file_cpp_path(self, dirs): """Returns the include paths expected in the .vcxproj file""" diff -Nru scons-4.4.0+dfsg/testing/framework/TestSCons.py scons-4.5.2+dfsg/testing/framework/TestSCons.py --- scons-4.4.0+dfsg/testing/framework/TestSCons.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestSCons.py 2023-03-21 16:17:04.000000000 +0000 @@ -44,7 +44,7 @@ from collections import namedtuple from TestCommon import * -from TestCommon import __all__ +from TestCommon import __all__, _python_ from SCons.Util import get_hash_format, get_current_hash_algorithm_used from TestCmd import Popen @@ -55,20 +55,14 @@ # here provides some independent verification that what we packaged # conforms to what we expect. -default_version = '4.3.1ayyyymmdd' +default_version = '4.5.2ayyyymmdd' # TODO: these need to be hand-edited when there are changes python_version_unsupported = (3, 6, 0) python_version_deprecated = (3, 6, 0) python_version_supported_str = "3.6.0" # str of lowest non-deprecated version -# In the checked-in source, the value of SConsVersion in the following -# line must remain "__ VERSION __" (without the spaces) so the built -# version in build/testing/framework/TestSCons.py contains the actual version -# string of the packages that have been built. -SConsVersion = '__VERSION__' -if SConsVersion == '__' + 'VERSION' + '__': - SConsVersion = default_version +SConsVersion = default_version __all__.extend([ 'TestSCons', @@ -126,7 +120,7 @@ # re.escape escapes too much. def re_escape(str): for c in '\\.[]()*+?': # Not an exhaustive list. - str = str.replace(c, '\\' + c) + str = str.replace(c, f"\\{c}") return str @@ -370,7 +364,7 @@ """ env = self.Environment(ENV) if env: - v = env.subst('$' + var) + v = env.subst(f"${var}") if not v: return None if prog is None: @@ -461,7 +455,7 @@ """ sconsflags = initialize_sconsflags(self.ignore_python_version) try: - TestCommon.run(self, *args, **kw) + super().run(*args, **kw) finally: restore_sconsflags(sconsflags) @@ -495,26 +489,26 @@ """ s = "" for arg in arguments.split(): - s = s + "scons: `%s' is up to date.\n" % arg + s = f"{s}scons: `{arg}' is up to date.\n" kw['arguments'] = arguments stdout = self.wrap_stdout(read_str=read_str, build_str=s) # Append '.*' so that timing output that comes after the # up-to-date output is okay. - kw['stdout'] = re.escape(stdout) + '.*' + kw['stdout'] = f"{re.escape(stdout)}.*" kw['match'] = self.match_re_dotall self.run(**kw) - def not_up_to_date(self, arguments='.', **kw): + def not_up_to_date(self, arguments='.', read_str="", **kw): """Asserts that none of the targets listed in arguments is up to date, but does not make any assumptions on other targets. This function is most useful in conjunction with the -n option. 
""" s = "" for arg in arguments.split(): - s = s + "(?!scons: `%s' is up to date.)" % re.escape(arg) - s = '(' + s + '[^\n]*\n)*' + s = f"{s}(?!scons: `{re.escape(arg)}' is up to date.)" + s = f"({s}[^\n]*\n)*" kw['arguments'] = arguments - stdout = re.escape(self.wrap_stdout(build_str='ARGUMENTSGOHERE')) + stdout = re.escape(self.wrap_stdout(read_str=read_str, build_str='ARGUMENTSGOHERE')) kw['stdout'] = stdout.replace('ARGUMENTSGOHERE', s) kw['match'] = self.match_re_dotall self.run(**kw) @@ -524,7 +518,7 @@ Verifies expected behavior for options that are not yet implemented: a warning message, and exit status 1. """ - msg = "Warning: the %s option is not yet implemented\n" % option + msg = f"Warning: the {option} option is not yet implemented\n" kw['stderr'] = msg if arguments: # If it's a long option and the argument string begins with '=', @@ -532,14 +526,14 @@ if option[:2] == '--' and arguments[0] == '=': kw['arguments'] = option + arguments else: - kw['arguments'] = option + ' ' + arguments + kw['arguments'] = f"{option} {arguments}" return self.run(**kw) def deprecated_wrap(self, msg): """ Calculate the pattern that matches a deprecation warning. """ - return '\nscons: warning: ' + re_escape(msg) + '\n' + file_expr + return f"\nscons: warning: {re_escape(msg)}\n{file_expr}" def deprecated_fatal(self, warn, msg): """ @@ -601,11 +595,11 @@ def RunPair(option, expected): # run the same test with the option on the command line and # then with the option passed via SetOption(). - self.run(options='--warn=' + option, + self.run(options=f"--warn={option}", arguments='.', stderr=expected, match=match_re_dotall) - self.run(options='WARN=' + option, + self.run(options=f"WARN={option}", arguments='.', stderr=expected, match=match_re_dotall) @@ -617,8 +611,8 @@ RunPair(warn, warning) # warning disabled, should get either nothing or mandatory message - expect = """()|(Can not disable mandataory warning: 'no-%s'\n\n%s)""" % (warn, warning) - RunPair('no-' + warn, expect) + expect = f"""()|(Can not disable mandataory warning: 'no-{warn}'\n\n{warning})""" + RunPair(f"no-{warn}", expect) return warning @@ -658,7 +652,7 @@ # x = x.replace('', file) # x = x.replace('line 1,', 'line %s,' % line) # x="\n".join(x) - x = 'File "%s", line %s, in \n' % (file, line) + x = f'File "{file}", line {line}, in \n' return x def normalize_ps(self, s): @@ -751,7 +745,7 @@ if hash_format is None and current_hash_algorithm == 'md5': return ".sconsign" else: - database_prefix=".sconsign_%s" % current_hash_algorithm + database_prefix=f".sconsign_{current_hash_algorithm}" return database_prefix @@ -797,13 +791,13 @@ if version: if sys.platform == 'win32': patterns = [ - 'C:/Program Files*/Java/jdk*%s*/bin' % version, + f'C:/Program Files*/Java/jdk*{version}*/bin', ] else: patterns = [ - '/usr/java/jdk%s*/bin' % version, - '/usr/lib/jvm/*-%s*/bin' % version, - '/usr/local/j2sdk%s*/bin' % version, + f'/usr/java/jdk{version}*/bin', + f'/usr/lib/jvm/*-{version}*/bin', + f'/usr/local/j2sdk{version}*/bin', ] java_path = self.paths(patterns) + [env['ENV']['PATH']] else: @@ -848,10 +842,10 @@ '/usr/lib/jvm/default-java/include/jni.h', '/usr/lib/jvm/java-*-oracle/include/jni.h'] else: - jni_dirs = ['/System/Library/Frameworks/JavaVM.framework/Versions/%s*/Headers/jni.h' % version] - jni_dirs.extend(['/usr/lib/jvm/java-*-sun-%s*/include/jni.h' % version, - '/usr/lib/jvm/java-%s*-openjdk*/include/jni.h' % version, - '/usr/java/jdk%s*/include/jni.h' % version]) + jni_dirs = 
[f'/System/Library/Frameworks/JavaVM.framework/Versions/{version}*/Headers/jni.h'] + jni_dirs.extend([f'/usr/lib/jvm/java-*-sun-{version}*/include/jni.h', + f'/usr/lib/jvm/java-{version}*-openjdk*/include/jni.h', + f'/usr/java/jdk{version}*/include/jni.h']) dirs = self.paths(jni_dirs) if not dirs: return None @@ -896,10 +890,10 @@ if os.path.exists(home): return home else: - if java_home.find('jdk%s' % version) != -1: + if java_home.find(f'jdk{version}') != -1: return java_home for home in [ - '/System/Library/Frameworks/JavaVM.framework/Versions/%s/Home' % version, + f'/System/Library/Frameworks/JavaVM.framework/Versions/{version}/Home', # osx 10.10 '/System/Library/Frameworks/JavaVM.framework/Versions/Current/' ]: @@ -909,7 +903,7 @@ home = '' else: jar = self.java_where_jar(version) - home = os.path.normpath('%s/..' % jar) + home = os.path.normpath(f'{jar}/..') if home and os.path.isdir(home): return home @@ -934,7 +928,7 @@ or b"Unable to locate a Java Runtime" in cp.stdout ): self.skip_test( - "Could not find Java " + java_bin_name + ", skipping test.\n", + f"Could not find Java {java_bin_name}, skipping test.\n", from_fw=True, ) @@ -1003,7 +997,7 @@ status=None) # Note recent versions output version info to stdout instead of stderr if version: - verf = 'javac %s' % version + verf = f'javac {version}' if self.stderr().find(verf) == -1 and self.stdout().find(verf) == -1: fmt = "Could not find javac for Java version %s, skipping test(s).\n" self.skip_test(fmt % version, from_fw=True) @@ -1159,6 +1153,7 @@ self.write([dir, 'lib', 'SConstruct'], r""" import sys +DefaultEnvironment(tools=[]) # test speedup env = Environment() if sys.platform == 'win32': env.StaticLibrary('myqt', 'my_qobject.cpp') @@ -1173,20 +1168,24 @@ self.QT = self.workpath(dir) self.QT_LIB = 'myqt' - self.QT_MOC = '%s %s' % (_python_, self.workpath(dir, 'bin', 'mymoc.py')) - self.QT_UIC = '%s %s' % (_python_, self.workpath(dir, 'bin', 'myuic.py')) + self.QT_MOC = f"{_python_} {self.workpath(dir, 'bin', 'mymoc.py')}" + self.QT_UIC = f"{_python_} {self.workpath(dir, 'bin', 'myuic.py')}" self.QT_LIB_DIR = self.workpath(dir, 'lib') - def Qt_create_SConstruct(self, place): + def Qt_create_SConstruct(self, place, qt_tool='qt3'): if isinstance(place, list): place = test.workpath(*place) - self.write(place, """\ + + var_prefix=qt_tool.upper() + self.write(place, f"""\ if ARGUMENTS.get('noqtdir', 0): - QTDIR = None + {var_prefix}DIR = None else: - QTDIR = r'%s' + {var_prefix}DIR = r'{self.QT}' +DefaultEnvironment(tools=[]) # test speedup env = Environment( - QTDIR=QTDIR, QT_LIB=r'%s', QT_MOC=r'%s', QT_UIC=r'%s', tools=['default', 'qt'] + {var_prefix}DIR={var_prefix}DIR, {var_prefix}_LIB=r'{self.QT_LIB}', {var_prefix}_MOC=r'{self.QT_MOC}', + {var_prefix}_UIC=r'{self.QT_UIC}', tools=['default', '{qt_tool}'] ) dup = 1 if ARGUMENTS.get('variant_dir', 0): @@ -1207,7 +1206,7 @@ sconscript = File('SConscript') Export("env dup") SConscript(sconscript) -""" % (self.QT, self.QT_LIB, self.QT_MOC, self.QT_UIC)) +""") NCR = 0 # non-cached rebuild CR = 1 # cached rebuild (up to date) @@ -1239,7 +1238,7 @@ """ if check_platform: if sys.platform != 'win32': - msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform + msg = f"Skipping Visual C/C++ test on non-Windows platform '{sys.platform}'\n" self.skip_test(msg, from_fw=True) return @@ -1293,14 +1292,14 @@ if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) - log = "\t" + re.escape("Configure(confdir = %s)" % sconf_dir) + ls + log = 
f"\t{re.escape(f'Configure(confdir = {sconf_dir})')}" + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) rdstr = "" for check_info in checks: - log = re.escape("scons: Configure: " + check_info.check_string) + ls + log = re.escape(f"scons: Configure: {check_info.check_string}") + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) @@ -1316,9 +1315,9 @@ # rebuild will pass if ext in ['.c', '.cpp']: log = log + conf_filename + re.escape(" <-") + ls - log = log + r"( \|" + nols + "*" + ls + ")+?" + log = f"{log}( \\|{nols}*{ls})+?" else: - log = log + "(" + nols + "*" + ls + ")*?" + log = f"{log}({nols}*{ls})*?" result_cached = 0 if flag == self.CR: # CR = cached rebuild (up to date)s @@ -1329,10 +1328,10 @@ re.escape("\" is up to date.") + ls log = log + re.escape("scons: Configure: The original builder " "output was:") + ls - log = log + r"( \|.*" + ls + ")+" + log = f"{log}( \\|.*{ls})+" if flag == self.NCF: # non-cached rebuild failure - log = log + "(" + nols + "*" + ls + ")*?" + log = f"{log}({nols}*{ls})*?" result_cached = 0 if flag == self.CF: # cached rebuild failure @@ -1341,14 +1340,14 @@ conf_filename + \ re.escape("\" failed in a previous run and all its sources are up to date.") + ls log = log + re.escape("scons: Configure: The original builder output was:") + ls - log = log + r"( \|.*" + ls + ")+" + log = f"{log}( \\|.*{ls})+" if result_cached: - result = "(cached) " + check_info.result + result = f"(cached) {check_info.result}" else: result = check_info.result - rdstr = rdstr + re.escape(check_info.check_string) + re.escape(result) + "\n" + rdstr = f"{rdstr + re.escape(check_info.check_string) + re.escape(result)}\n" - log = log + re.escape("scons: Configure: " + result) + ls + ls + log = log + re.escape(f"scons: Configure: {result}") + ls + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) @@ -1431,7 +1430,7 @@ if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) - log = "\t" + re.escape("Configure(confdir = %s)" % sconf_dir) + ls + log = f"\t{re.escape(f'Configure(confdir = {sconf_dir})')}" + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) @@ -1439,7 +1438,7 @@ cnt = 0 for check, result, cache_desc in zip(checks, results, cached): - log = re.escape("scons: Configure: " + check) + ls + log = re.escape(f"scons: Configure: {check}") + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) @@ -1472,9 +1471,9 @@ # rebuild will pass if ext in ['.c', '.cpp']: log = log + conf_filename + re.escape(" <-") + ls - log = log + r"( \|" + nols + "*" + ls + ")+?" + log = f"{log}( \\|{nols}*{ls})+?" else: - log = log + "(" + nols + "*" + ls + ")*?" + log = f"{log}({nols}*{ls})*?" result_cached = 0 if flag == self.CR: # CR = cached rebuild (up to date)s @@ -1485,10 +1484,10 @@ re.escape("\" is up to date.") + ls log = log + re.escape("scons: Configure: The original builder " "output was:") + ls - log = log + r"( \|.*" + ls + ")+" + log = f"{log}( \\|.*{ls})+" if flag == self.NCF: # non-cached rebuild failure - log = log + "(" + nols + "*" + ls + ")*?" + log = f"{log}({nols}*{ls})*?" 
result_cached = 0 if flag == self.CF: # cached rebuild failure @@ -1497,14 +1496,14 @@ conf_filename + \ re.escape("\" failed in a previous run and all its sources are up to date.") + ls log = log + re.escape("scons: Configure: The original builder output was:") + ls - log = log + r"( \|.*" + ls + ")+" + log = f"{log}( \\|.*{ls})+" # cnt = cnt + 1 if result_cached: - result = "(cached) " + result + result = f"(cached) {result}" - rdstr = rdstr + re.escape(check) + re.escape(result) + "\n" + rdstr = f"{rdstr + re.escape(check) + re.escape(result)}\n" - log = log + re.escape("scons: Configure: " + result) + ls + ls + log = log + re.escape(f"scons: Configure: {result}") + ls + ls if doCheckLog: lastEnd = match_part_of_configlog(log, logfile, lastEnd) @@ -1636,7 +1635,7 @@ kw['stdin'] = True sconsflags = initialize_sconsflags(self.ignore_python_version) try: - p = TestCommon.start(self, *args, **kw) + p = super().start(*args, **kw) finally: restore_sconsflags(sconsflags) return p @@ -1648,7 +1647,7 @@ waited = 0.0 while not os.path.exists(fname): if timeout and waited >= timeout: - sys.stderr.write('timed out waiting for %s to exist\n' % fname) + sys.stderr.write(f'timed out waiting for {fname} to exist\n') if popen: popen.stdin.close() popen.stdin = None @@ -1656,11 +1655,11 @@ self.finish(popen) stdout = self.stdout() if stdout: - sys.stdout.write(self.banner('STDOUT ') + '\n') + sys.stdout.write(f"{self.banner('STDOUT ')}\n") sys.stdout.write(stdout) stderr = self.stderr() if stderr: - sys.stderr.write(self.banner('STDERR ') + '\n') + sys.stderr.write(f"{self.banner('STDERR ')}\n") sys.stderr.write(stderr) self.fail_test() time.sleep(1.0) @@ -1805,7 +1804,7 @@ if 'options' not in kw and self.variables: options = [] for variable, value in self.variables.items(): - options.append('%s=%s' % (variable, value)) + options.append(f'{variable}={value}') kw['options'] = ' '.join(options) if self.calibrate: self.calibration(*args, **kw) @@ -1819,8 +1818,8 @@ fmt = "TRACE: graph=%s name=%s value=%s units=%s" line = fmt % (graph, name, value, units) if sort is not None: - line = line + (' sort=%s' % sort) - line = line + '\n' + line = f"{line} sort={sort}" + line = f"{line}\n" sys.stdout.write(line) sys.stdout.flush() @@ -1864,7 +1863,7 @@ options = kw.get('options', '') if additional is not None: options += additional - kw['options'] = options + ' --debug=memory,time' + kw['options'] = f"{options} --debug=memory,time" def startup(self, *args, **kw): """ @@ -1910,8 +1909,8 @@ self.run(*args, **kw) for variable in self.calibrate_variables: value = self.variables[variable] - sys.stdout.write('VARIABLE: %s=%s\n' % (variable, value)) - sys.stdout.write('ELAPSED: %s\n' % self.elapsed_time()) + sys.stdout.write(f'VARIABLE: {variable}={value}\n') + sys.stdout.write(f'ELAPSED: {self.elapsed_time()}\n') def null(self, *args, **kw): """ @@ -1995,12 +1994,12 @@ """ s = "" for arg in arguments.split(): - s = s + "scons: `%s' is up to date.\n" % arg + s = f"{s}scons: `{arg}' is up to date.\n" kw['arguments'] = arguments stdout = self.wrap_stdout(read_str="REPLACEME", build_str=s) # Append '.*' so that timing output that comes after the # up-to-date output is okay. 
- stdout = re.escape(stdout) + '.*' + stdout = f"{re.escape(stdout)}.*" stdout = stdout.replace('REPLACEME', read_str) kw['stdout'] = stdout kw['match'] = self.match_re_dotall diff -Nru scons-4.4.0+dfsg/testing/framework/TestSCons_time.py scons-4.5.2+dfsg/testing/framework/TestSCons_time.py --- scons-4.4.0+dfsg/testing/framework/TestSCons_time.py 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/testing/framework/TestSCons_time.py 2023-03-21 16:17:04.000000000 +0000 @@ -73,8 +73,7 @@ exec(script) """ -svn_py = """\ -#!/usr/bin/env python +svn_py = f"""#!/usr/bin/env python import os import sys @@ -82,12 +81,11 @@ script_dir = dir + '/scripts' os.makedirs(script_dir) with open(script_dir + '/scons.py', 'w') as f: - f.write(r'''%s''') -""" % scons_py + f.write(r'''{scons_py}''') +""" -git_py = """\ -#!/usr/bin/env python +git_py = f"""#!/usr/bin/env python import os import sys @@ -95,8 +93,8 @@ script_dir = dir + '/scripts' os.makedirs(script_dir) with open(script_dir + '/scons.py', 'w') as f: - f.write(r'''%s''') -""" % scons_py + f.write(r'''{scons_py}''') +""" logfile_contents = """\ @@ -243,7 +241,7 @@ args = (tempdir, 'scons-time-',) + args x = os.path.join(*args) x = re.escape(x) - x = x.replace('time\\-', 'time\\-[^%s]*' % sep) + x = x.replace('time\\-', f'time\\-[^{sep}]*') return x def write_fake_scons_py(self): diff -Nru scons-4.4.0+dfsg/.travis.yml scons-4.5.2+dfsg/.travis.yml --- scons-4.4.0+dfsg/.travis.yml 2022-07-30 21:48:28.000000000 +0000 +++ scons-4.5.2+dfsg/.travis.yml 2023-03-21 16:17:04.000000000 +0000 @@ -15,7 +15,7 @@ install: # needed for Docbook tests, must be in virtualenv context - - pip install -r requirements.txt + - pip install -r requirements-dev.txt # do the rest of the image setup - ./.travis/install.sh